diff --git a/.secrets.baseline b/.secrets.baseline
index 43d4c0cc2..2313a5582 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -20,6 +20,9 @@
     {
       "name": "CloudantDetector"
     },
+    {
+      "name": "DiscordBotTokenDetector"
+    },
     {
       "name": "GitHubTokenDetector"
     },
@@ -72,10 +75,6 @@
     {
       "path": "detect_secrets.filters.allowlist.is_line_allowlisted"
     },
-    {
-      "path": "detect_secrets.filters.common.is_baseline_file",
-      "filename": ".secrets.baseline"
-    },
     {
       "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
       "min_level": 2
@@ -106,12 +105,6 @@
     },
     {
       "path": "detect_secrets.filters.heuristic.is_templated_secret"
-    },
-    {
-      "path": "detect_secrets.filters.regex.should_exclude_file",
-      "pattern": [
-        "poetry.lock"
-      ]
     }
   ],
   "results": {
@@ -133,6 +126,15 @@
         "line_number": 7
       }
     ],
+    "docs/fence_multifactor_authentication_guide.md": [
+      {
+        "type": "Secret Keyword",
+        "filename": "docs/fence_multifactor_authentication_guide.md",
+        "hashed_secret": "0f674908b6342fcf2a9842d04699cb008d57d399",
+        "is_verified": false,
+        "line_number": 38
+      }
+    ],
     "fence/blueprints/storage_creds/google.py": [
       {
         "type": "Private Key",
@@ -315,6 +317,15 @@
         "line_number": 49
       }
     ],
+    "tests/login/test_idp_oauth2.py": [
+      {
+        "type": "Secret Keyword",
+        "filename": "tests/login/test_idp_oauth2.py",
+        "hashed_secret": "f3bbbd66a63d4bf1747940578ec3d0103530e21d",
+        "is_verified": false,
+        "line_number": 8
+      }
+    ],
     "tests/migrations/test_a04a70296688.py": [
       {
         "type": "Hex High Entropy String",
@@ -368,5 +379,5 @@
       }
     ]
   },
-  "generated_at": "2023-09-01T18:49:42Z"
+  "generated_at": "2023-09-01T19:02:21Z"
 }
diff --git a/.travis.yml b/.travis.yml
index 239ea50bb..9dfb763ba 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,5 @@
 language: python
-
+dist: jammy
 
 python:
   - "3.9"
@@ -7,8 +7,26 @@ sudo: false
 
 cache: pip
 
+services:
+  - postgresql
+
 addons:
-  postgresql: "9.6"
+  postgresql: "13"
+  apt:
+    sources:
+      - sourceline: deb http://apt.postgresql.org/pub/repos/apt/ jammy-pgdg main
+          13
+        key_url: https://www.postgresql.org/media/keys/ACCC4CF8.asc
+    packages:
+      - postgresql-13
+
+before_install:
+  # Copy custom configs from the repo because PG-13 isn't set up to run like
+  # it normally does on Travis out of the box.
+  # Source: https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/.travis.yml
+  - sudo cp travis/postgresql.conf /etc/postgresql/13/main/postgresql.conf
+  - sudo cp travis/pg_hba.conf /etc/postgresql/13/main/pg_hba.conf
+  - sudo pg_ctlcluster 13 main restart
 
 install:
   - pip install --upgrade pip
diff --git a/docs/fence_multifactor_authentication_guide.md b/docs/fence_multifactor_authentication_guide.md
new file mode 100644
index 000000000..f0f8cea24
--- /dev/null
+++ b/docs/fence_multifactor_authentication_guide.md
@@ -0,0 +1,64 @@
+# Fence Multifactor Authentication Guide
+
+Fence is capable of using token claims from IdPs to identify when multifactor authentication (MFA) was used during the authentication process.
+
+## File Level Enforcement
+To restrict access to files to users who've authenticated with MFA, the following resource *MUST* be present in the indexd record's `authz`:
+`/multifactor_auth`
+
+And the following configs must be updated:
+- fence-config.yaml
+- user.yaml
+
+### fence-config.yaml changes
+
+MFA claim checking is configured on a per-IdP basis. For a given IdP, define the name of the claim in the id_token and its possible values that indicate MFA.
+If the id_token claim value matches at least one value in the configured multifactor_auth_claim_info.values, then the "/multifactor_auth" resource will be assigned to the user.
+
+For example, Okta may issue the following id_token when MFA is used:
+```
+{
+    "amr": ["otp", "pwd"],
+    "aud": "6joRGIzNCaJfdCPzRjlh",
+    "auth_time": 1311280970,
+    "exp": 1311280970,
+    "iat": 1311280970,
+    "idp": "00ok1u7AsAkrwdZL3z0g3",
+    "iss": "https://$",
+    "jti": "Tlenfse93dgkaksginv",
+    "sub": "00uk1u7AsAk6dZL3z0g3",
+    "ver": 1
+}
+```
+
+And fence-config.yaml is configured as follows:
+```
+OPENID_CONNECT:
+  okta:
+    client_id: 'redacted'
+    client_secret: 'redacted'
+    multifactor_auth_claim_info:
+      claim: 'amr'
+      values: [ "mfa", "otp", "sms" ]
+```
+
+Then fence will assign the "/multifactor_auth" resource to the user in Arborist.
+
+### user.yaml changes
+The `mfa_policy` policy and `multifactor_auth` resource must be added to user.yaml so the appropriate policy and resource are created in arborist when usersync runs.
+
+NOTE: The role_ids provided here are an example and should be changed to the appropriate arborist roles for the commons.
+
+Add the following to the `resources` section:
+```yaml
+  - name: multifactor_auth
+```
+
+Add the following to the `policies` section:
+```yaml
+- id: mfa_policy
+  role_ids:
+    - read-storage
+    - read
+  resource_paths:
+    - /multifactor_auth
+```
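The matching rule the guide describes is a set intersection between the token's claim values and the configured values. A minimal standalone sketch of that rule (the function name is invented; Fence's real implementation is `has_mfa_claim` in `fence/resources/openid/idp_oauth2.py`, shown later in this diff):

```python
# Illustrative sketch only: mirrors the claim matching the MFA guide describes.
# "amr" is a list claim; "acr" is a single space-delimited string claim.
def token_indicates_mfa(decoded_id_token: dict, claim: str, values: list) -> bool:
    if claim == "amr":
        token_values = decoded_id_token.get(claim, [])
    elif claim == "acr":
        token_values = decoded_id_token.get(claim, "").split(" ")
    else:
        return False  # only AMR/ACR are supported for MFA detection
    return bool(set(token_values) & set(values))

# Using the example Okta token from the guide:
okta_token = {"amr": ["otp", "pwd"]}
assert token_indicates_mfa(okta_token, "amr", ["mfa", "otp", "sms"])
```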
diff --git a/fence/blueprints/link.py b/fence/blueprints/link.py
index 944909e51..99ac4c9fa 100644
--- a/fence/blueprints/link.py
+++ b/fence/blueprints/link.py
@@ -274,7 +274,7 @@ def get(self):
         code = flask.request.args.get("code")
         if not config.get("MOCK_GOOGLE_AUTH", False):
-            google_response = flask.current_app.google_client.get_user_id(code)
+            google_response = flask.current_app.google_client.get_auth_info(code)
             email = google_response.get("email")
         else:
             # if we're mocking google auth, mock response to include the email
diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py
index 8827845bb..0b6ae3f95 100644
--- a/fence/blueprints/login/base.py
+++ b/fence/blueprints/login/base.py
@@ -1,4 +1,5 @@
 import flask
+from cdislogging import get_logger
 from flask_restful import Resource
 from urllib.parse import urlparse, urlencode, parse_qsl
@@ -7,6 +8,8 @@
 from fence.config import config
 from fence.errors import UserError
 
+logger = get_logger(__name__)
+
 
 class DefaultOAuth2Login(Resource):
     def __init__(self, idp_name, client):
@@ -63,6 +66,7 @@ def __init__(
         username_field="email",
         email_field="email",
         id_from_idp_field="sub",
+        app=flask.current_app,
     ):
         """
         Construct a resource for a login callback endpoint
@@ -84,6 +88,10 @@ def __init__(
         self.username_field = username_field
         self.email_field = email_field
         self.id_from_idp_field = id_from_idp_field
+        self.is_mfa_enabled = "multifactor_auth_claim_info" in config[
+            "OPENID_CONNECT"
+        ].get(self.idp_name, {})
+        self.app = app
 
     def get(self):
         # Check if user granted access
@@ -109,7 +117,7 @@ def get(self):
             return flask.redirect(location=final_redirect_url)
 
         code = flask.request.args.get("code")
-        result = self.client.get_user_id(code)
+        result = self.client.get_auth_info(code)
         username = result.get(self.username_field)
         if not username:
             raise UserError(
@@ -125,6 +133,22 @@ def get(self):
 
     def post_login(self, user=None, token_result=None, **kwargs):
         prepare_login_log(self.idp_name)
+        if token_result:
+            username = token_result.get(self.username_field)
+            if self.is_mfa_enabled:
+                if token_result.get("mfa"):
+                    logger.info(f"Adding mfa_policy for {username}")
+                    self.app.arborist.grant_user_policy(
+                        username=username,
+                        policy_id="mfa_policy",
+                    )
+                    return
+                else:
+                    logger.info(f"Revoking mfa_policy for {username}")
+                    self.app.arborist.revoke_user_policy(
+                        username=username,
+                        policy_id="mfa_policy",
+                    )
 
 
 def prepare_login_log(idp_name):
diff --git a/fence/blueprints/login/ras.py b/fence/blueprints/login/ras.py
index 7a9470a8a..31bd7a340 100644
--- a/fence/blueprints/login/ras.py
+++ b/fence/blueprints/login/ras.py
@@ -114,4 +114,4 @@ def post_login(self, user=None, token_result=None, id_from_idp=None):
             user=user, refresh_token=refresh_token, expires=expires + issued_time
         )
 
-        super(RASCallback, self).post_login(id_from_idp=id_from_idp)
+        super(RASCallback, self).post_login(token_result=token_result)
diff --git a/fence/config-default.yaml b/fence/config-default.yaml
index 955ada6f7..89525ede2 100755
--- a/fence/config-default.yaml
+++ b/fence/config-default.yaml
@@ -111,6 +111,9 @@ OPENID_CONNECT:
     user_id_field: ''  # optional (default "sub"); claims field to get the user_id from
     email_field: ''  # optional (default "email"); claims field to get the user email from
    scope: ''  # optional (default "openid")
+    multifactor_auth_claim_info:  # optional, include if you're using arborist to enforce mfa on a per-file level
+      claim: ''  # claims field that indicates mfa, either the acr or amr claim
+      values: [ "" ]  # possible values that indicate mfa was used. At least one value configured here is required to be in the token
   # These Google values must be obtained from Google's Cloud Console
   # Follow: https://developers.google.com/identity/protocols/OpenIDConnect
@@ -181,6 +184,9 @@ OPENID_CONNECT:
     client_secret: ''
     redirect_url: '{{BASE_URL}}/login/ras/callback'
     scope: 'openid email profile ga4gh_passport_v1'
+#    multifactor_auth_claim_info:
+#      claim: 'acr'
+#      values: [ 'https://stsstg.nih.gov/assurance/aal/2' ]
     # if mock is true, will fake a successful login response for login
     # WARNING: DO NOT ENABLE IN PRODUCTION (for testing purposes only)
     mock: false
@@ -207,6 +213,9 @@ OPENID_CONNECT:
     # WARNING: DO NOT ENABLE IN PRODUCTION (for testing purposes only)
     mock: false
     mock_default_user: 'test@example.com'
+    # multifactor_auth_claim_info:
+    #   claim: 'amr'
+    #   values: [ "mfa", "otp", "rsa", "ngcmfa", "wiaormfa" ]
   # For information on configuring an Okta tenant as an OIDC IdP refer to Okta documentation at:
   # https://developer.okta.com/docs/reference/api/oidc/#2-okta-as-the-identity-platform-for-your-app-or-api
   okta:
@@ -215,6 +224,9 @@ OPENID_CONNECT:
     client_secret: ''
     redirect_url: '{{BASE_URL}}/login/okta/login/'
     scope: 'openid email'
+    # multifactor_auth_claim_info:
+    #   claim: 'amr'
+    #   values: [ "mfa", "otp", "sms" ]
   cognito:
     # You must create a user pool in order to have a discovery url
     discovery_url: 'https://cognito-idp.{REGION}.amazonaws.com/{USER-POOL-ID}/.well-known/openid-configuration'
@@ -241,6 +253,9 @@ OPENID_CONNECT:
     # WARNING: DO NOT ENABLE IN PRODUCTION (for testing purposes only)
     mock: false
     mock_default_user: 'http://cilogon.org/serverT/users/64703'
+    # multifactor_auth_claim_info:
+    #   claim: 'acr'
+    #   values: [ "https://refeds.org/profile/mfa" ]
   synapse:
     discovery_url: ''
     client_id: ''
@@ -532,6 +547,25 @@ dbGaP:
   #
   #   NOTE: when this is "false" the above would become "phs000123"
   parse_consent_code: true
+  # When a dbGaP study authorizes access to child studies through a parent study ID,
+  # you can use this mapping. When a user gets access to the first ID, they automatically
+  # get access to the list of projects to the right.
+  #
+  # There's usually a note in the "Authorized Access" section of the dbGaP study page
+  # (https://www.ncbi.nlm.nih.gov/projects/gap/cgi-bin/study.cgi?study_id=phs001843.v1.p2)
+  # along the lines of:
+  #     Note: The data for this study is collected as a substudy of
+  #     phs001194.v3.p2. dbGaP Authorized Access requests for
+  #     this data should be made for study phs001194.v3.p2 and
+  #     not phs001843.v1.p2
+  #
+  # There are also other dbGaP APIs that expose this parent/child mapping.
+  # Example: https://dbgap.ncbi.nlm.nih.gov/ss/dbgapssws.cgi?request=Study&phs=000571&v=6
+  #
+  # If `parse_consent_code` is true, then a user will be given access to the exact
+  # same consent codes in the child studies
+  parent_to_child_studies_mapping:
+    # 'phs001194': ['phs000571', 'phs001843']
   # A consent of "c999" can indicate access to that study's "exchange area data"
   # and when a user has access to one study's exchange area data, they
   # have access to the parent study's "common exchange area data" that is not study
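To make the mapping's effect concrete, here is a hypothetical walk-through of how it interacts with `parse_consent_code` (the mapping and accession values are illustrative, not real grants):

```python
# Hypothetical illustration of parent-to-child study expansion (values made up).
parent_to_child_studies_mapping = {"phs001194": ["phs000571", "phs001843"]}

# With parse_consent_code enabled, a telemetry row granting "phs001194.c1"
# also grants the same consent group on each configured child study:
granted = "phs001194.c1"
parent, consent = granted.split(".")
expanded = [granted] + [
    f"{child}.{consent}"
    for child in parent_to_child_studies_mapping.get(parent, [])
]
print(expanded)  # ['phs001194.c1', 'phs000571.c1', 'phs001843.c1']
```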
diff --git a/fence/config.py b/fence/config.py
index ea085e50f..b4deeeb22 100644
--- a/fence/config.py
+++ b/fence/config.py
@@ -130,18 +130,47 @@ def post_process(self):
                 "BILLING_PROJECT_FOR_SA_CREDS or BILLING_PROJECT_FOR_SIGNED_URLS is set to a non-None value. "
                 "SESSION_ALLOWED_SCOPES includes `google_credentials`. Removing "
                 "`google_credentials` from USER_ALLOWED_SCOPES as this could allow "
-                "end-users to indescriminently bill our default project. Clients are inheritently "
+                "end-users to indiscriminately bill our default project. Clients are inherently "
                 "trusted, so we do not restrict this scope for clients."
             )
             self._configs["SESSION_ALLOWED_SCOPES"].remove("google_credentials")
 
         if (
             not self._configs["ENABLE_VISA_UPDATE_CRON"]
-            and self._configs["GLOBAL_PARSE_VISAS_ON_LOGIN"] != False
+            and self._configs["GLOBAL_PARSE_VISAS_ON_LOGIN"] is not False
         ):
             raise Exception(
                 "Visa parsing on login is enabled but `ENABLE_VISA_UPDATE_CRON` is disabled!"
             )
+
+        for idp_id, idp in self._configs.get("OPENID_CONNECT", {}).items():
+            mfa_info = idp.get("multifactor_auth_claim_info")
+            if mfa_info and mfa_info["claim"] not in ["amr", "acr"]:
+                logger.warning(
+                    f"IdP '{idp_id}' is using multifactor_auth_claim_info '{mfa_info['claim']}', which is neither AMR nor ACR. Unable to determine if a user used MFA. Fence will continue and assume they have not used MFA."
+                )
+
+        self._validate_parent_child_studies(self._configs["dbGaP"])
+
+    @staticmethod
+    def _validate_parent_child_studies(dbgap_configs):
+        if isinstance(dbgap_configs, list):
+            configs = dbgap_configs
+        else:
+            configs = [dbgap_configs]
+
+        all_parent_studies = set()
+        for dbgap_config in configs:
+            parent_studies = dbgap_config.get(
+                "parent_to_child_studies_mapping", {}
+            ).keys()
+            conflicts = parent_studies & all_parent_studies
+            if len(conflicts) > 0:
+                raise Exception(
+                    f"{conflicts} are duplicate parent study ids found in parent_to_child_studies_mapping for "
+                    f"multiple dbGaP configurations."
+                )
+            all_parent_studies.update(parent_studies)
+
 
 config = FenceConfig(DEFAULT_CFG_PATH)
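The duplicate-parent condition that `_validate_parent_child_studies` rejects can be illustrated with invented configs; this sketch mirrors the logic above rather than calling Fence itself:

```python
# Hypothetical duplicate-parent configs that the new validation rejects:
# two dbGaP server configs both claim to be the parent of study phs001194.
dbgap_configs = [
    {"parent_to_child_studies_mapping": {"phs001194": ["phs000571"]}},
    {"parent_to_child_studies_mapping": {"phs001194": ["phs001843"]}},
]

seen = set()
try:
    for cfg in dbgap_configs:
        parents = set(cfg.get("parent_to_child_studies_mapping", {}))
        if parents & seen:
            raise Exception(f"{parents & seen} are duplicate parent study ids")
        seen |= parents
except Exception as e:
    print(e)  # {'phs001194'} are duplicate parent study ids
```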
+ ) + all_parent_studies.update(parent_studies) + config = FenceConfig(DEFAULT_CFG_PATH) diff --git a/fence/resources/openid/cilogon_oauth2.py b/fence/resources/openid/cilogon_oauth2.py index 50d79c9c7..163663420 100644 --- a/fence/resources/openid/cilogon_oauth2.py +++ b/fence/resources/openid/cilogon_oauth2.py @@ -31,7 +31,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc( "token_endpoint", "https://cilogon.org/oauth2/token" diff --git a/fence/resources/openid/cognito_oauth2.py b/fence/resources/openid/cognito_oauth2.py index 883d94c5c..73038c87f 100644 --- a/fence/resources/openid/cognito_oauth2.py +++ b/fence/resources/openid/cognito_oauth2.py @@ -37,7 +37,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): """ Exchange code for tokens, get email from id token claims. Return dict with "email" field on success OR "error" field on error. diff --git a/fence/resources/openid/google_oauth2.py b/fence/resources/openid/google_oauth2.py index 10fdc41d3..b396fe9ca 100644 --- a/fence/resources/openid/google_oauth2.py +++ b/fence/resources/openid/google_oauth2.py @@ -34,7 +34,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): """ Get user id """ diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 80169ba4b..c2e497085 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -154,7 +154,7 @@ def get_auth_url(self): ) return uri - def get_user_id(self, code): + def get_auth_info(self, code): """ Exchange code for tokens, get user_id from id token claims. Return dictionary with necessary field(s) for successfully logged in @@ -169,7 +169,10 @@ def get_user_id(self, code): if claims.get(user_id_field): if user_id_field == "email" and not claims.get("email_verified"): return {"error": "Email is not verified"} - return {user_id_field: claims[user_id_field]} + return { + user_id_field: claims[user_id_field], + "mfa": self.has_mfa_claim(claims), + } else: self.logger.exception( f"Can't get {user_id_field} from claims: {claims}" @@ -220,6 +223,43 @@ def get_access_token(self, user, token_endpoint, db_session=None): return token_response + def has_mfa_claim(self, decoded_id_token): + """ + Determines if the claim denoting whether multifactor authentication was used is contained within the claims + of the provided id_token. + + Parameters: + - decoded_id_token (dict): The decoded id_token, a dict of claims -> claim values. + + """ + mfa_claim_info = self.settings.get("multifactor_auth_claim_info") + if not mfa_claim_info: + return False + claim_name = mfa_claim_info.get("claim") + mfa_values = mfa_claim_info.get("values") + if not claim_name or not mfa_values: + self.logger.warning( + f"{self.idp} has a configured multifactor_auth_claim_info with a missing claim name " + f"and values. Please check the OPENID_CONNECT settings for {self.idp} in the fence " + f"config yaml." 
+ ) + return False + mfa_claims = [] + if claim_name == "amr": + mfa_claims = decoded_id_token.get(claim_name, []) + elif claim_name == "acr": + mfa_claims = decoded_id_token.get(claim_name, "").split(" ") + else: + self.logger.error( + f"{claim_name} is neither AMR or ACR - cannot determine if MFA was used" + ) + return False + + self.logger.info( + f"Comparing token's {claim_name} claims: {mfa_claims} to mfa values {mfa_values}" + ) + return len(set(mfa_claims) & set(mfa_values)) > 0 + def store_refresh_token(self, user, refresh_token, expires, db_session=None): """ Store refresh token in db. diff --git a/fence/resources/openid/microsoft_oauth2.py b/fence/resources/openid/microsoft_oauth2.py index 82bb2cc4d..916a4a2b1 100755 --- a/fence/resources/openid/microsoft_oauth2.py +++ b/fence/resources/openid/microsoft_oauth2.py @@ -35,7 +35,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): """ Get user id given an authorization code """ diff --git a/fence/resources/openid/okta_oauth2.py b/fence/resources/openid/okta_oauth2.py index 6ce062420..572031623 100644 --- a/fence/resources/openid/okta_oauth2.py +++ b/fence/resources/openid/okta_oauth2.py @@ -27,7 +27,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc( "token_endpoint", diff --git a/fence/resources/openid/orcid_oauth2.py b/fence/resources/openid/orcid_oauth2.py index 7388f7f83..ee8711f33 100644 --- a/fence/resources/openid/orcid_oauth2.py +++ b/fence/resources/openid/orcid_oauth2.py @@ -33,7 +33,7 @@ def get_auth_url(self): return uri - def get_user_id(self, code): + def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc( "token_endpoint", "https://orcid.org/oauth/token" diff --git a/fence/resources/openid/ras_oauth2.py b/fence/resources/openid/ras_oauth2.py index c894e5d36..b94897b87 100644 --- a/fence/resources/openid/ras_oauth2.py +++ b/fence/resources/openid/ras_oauth2.py @@ -117,7 +117,7 @@ def get_encoded_visas_v11_userinfo(self, userinfo, pkey_cache=None): ) ) - def get_user_id(self, code): + def get_auth_info(self, code): err_msg = "Unable to parse UserID from RAS userinfo response" try: @@ -189,6 +189,7 @@ def get_user_id(self, code): "username": username, "email": userinfo.get("email"), "sub": userinfo.get("sub"), + "mfa": self.has_mfa_claim(claims), } def map_iss_sub_pair_to_user( diff --git a/fence/resources/openid/synapse_oauth2.py b/fence/resources/openid/synapse_oauth2.py index 069bf2151..ebda3cab8 100644 --- a/fence/resources/openid/synapse_oauth2.py +++ b/fence/resources/openid/synapse_oauth2.py @@ -98,7 +98,7 @@ def load_key(self, jwks_endpoint): return None, None - def get_user_id(self, code): + def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc( "token_endpoint", config["SYNAPSE_URI"] + "/oauth2/token" diff --git a/fence/resources/user/user_session.py b/fence/resources/user/user_session.py index b17ffb775..326c84860 100644 --- a/fence/resources/user/user_session.py +++ b/fence/resources/user/user_session.py @@ -204,7 +204,7 @@ def save_session(self, app, session, response): except Unauthorized: user = None - user_sess_id = _get_user_id_from_session(session) + user_sess_id = _get_auth_info_from_session(session) # user_id == '' in session means no login has occured, which is # okay if user is hitting with just an access_token @@ -288,8 +288,8 @@ def _get_valid_access_token(app, session, 
diff --git a/fence/sync/sync_users.py b/fence/sync/sync_users.py
index dc6f68893..37d535832 100644
--- a/fence/sync/sync_users.py
+++ b/fence/sync/sync_users.py
@@ -335,7 +335,6 @@ def __init__(
         self.sync_from_local_yaml_file = sync_from_local_yaml_file
         self.is_sync_from_dbgap_server = is_sync_from_dbgap_server
         self.dbGaP = dbGaP
-        self.parse_consent_code = dbGaP[0].get("parse_consent_code", True)
         self.session = db_session
         self.driver = get_SQLAlchemyDriver(DB)
         self.project_mapping = project_mapping or {}
@@ -352,7 +351,11 @@ def __init__(
         self.auth_source = defaultdict(set)
         # auth_source used for logging. username : [source1, source2]
         self.visa_types = config.get("USERSYNC", {}).get("visa_types", {})
-
+        self.parent_to_child_studies_mapping = {}
+        for dbgap_config in dbGaP:
+            self.parent_to_child_studies_mapping.update(
+                dbgap_config.get("parent_to_child_studies_mapping", {})
+            )
         if storage_credentials:
             self.storage_manager = StorageManager(
                 storage_credentials, logger=self.logger
@@ -455,6 +458,11 @@ def _get_from_ftp_with_proxy(self, server, path):
         )
         os.system(execstr)
 
+    def _get_parse_consent_code(self, dbgap_config={}):
+        return dbgap_config.get(
+            "parse_consent_code", True
+        )  # Should this really be true?
+
     def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
         """
         parse csv files to python dict
@@ -514,8 +522,9 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
         study_common_exchange_areas = dbgap_config.get(
             "study_common_exchange_areas", {}
         )
+        parse_consent_code = self._get_parse_consent_code(dbgap_config)
 
-        if self.parse_consent_code and enable_common_exchange_area_access:
+        if parse_consent_code and enable_common_exchange_area_access:
             self.logger.info(
                 f"using study to common exchange area mapping: {study_common_exchange_areas}"
             )
@@ -585,7 +594,7 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
                     )
                 )
                 continue
-            if len(phsid) > 1 and self.parse_consent_code:
+            if len(phsid) > 1 and parse_consent_code:
                 consent_code = phsid[-1]
 
                 # c999 indicates full access to all consents and access
@@ -622,6 +631,15 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
                     dbgap_project += "." + consent_code
 
+            self._add_children_for_dbgap_project(
+                dbgap_project,
+                privileges,
+                username,
+                sess,
+                user_projects,
+                dbgap_config,
+            )
+
             display_name = row.get("user name") or ""
             tags = {"dbgap_role": row.get("role") or ""}
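For reference, the accession handling above reduces to a few string operations; this standalone sketch uses an invented accession value:

```python
# Illustrative sketch of the phsid/consent parsing done in _parse_csv
# (sample accession invented).
phsid = "phs001179.v9.p8.c1".split(".")  # ['phs001179', 'v9', 'p8', 'c1']
dbgap_project = phsid[0]                 # 'phs001179'
if len(phsid) > 1:
    consent_code = phsid[-1]             # 'c1'; 'c999' means all consents
    dbgap_project += "." + consent_code  # 'phs001179.c1'
```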
@@ -651,6 +669,47 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
 
         return user_projects, user_info
 
+    def _get_children(self, dbgap_project):
+        return self.parent_to_child_studies_mapping.get(dbgap_project.split(".")[0])
+
+    def _add_children_for_dbgap_project(
+        self, dbgap_project, privileges, username, sess, user_projects, dbgap_config
+    ):
+        """
+        Adds the configured child studies for the given dbgap_project, adding them to the provided user_projects. If
+        parse_consent_code is true, then the consents granted in the provided dbgap_project will also be granted to the
+        child studies.
+        """
+        parent_phsid = dbgap_project
+        parse_consent_code = self._get_parse_consent_code(dbgap_config)
+        child_suffix = ""
+        if parse_consent_code and re.match(
+            config["DBGAP_ACCESSION_WITH_CONSENT_REGEX"], dbgap_project
+        ):
+            parent_phsid_parts = dbgap_project.split(".")
+            parent_phsid = parent_phsid_parts[0]
+            child_suffix = "." + parent_phsid_parts[1]
+
+        if parent_phsid not in self.parent_to_child_studies_mapping:
+            return
+
+        self.logger.info(
+            f"found parent study {parent_phsid} and Fence "
+            "is configured to provide additional access to child studies. Giving user "
+            f"{username} {privileges} privileges in projects: "
+            f"{{k + child_suffix: v + child_suffix for k, v in self.parent_to_child_studies_mapping.items()}}."
+        )
+        child_studies = self.parent_to_child_studies_mapping.get(parent_phsid, [])
+        for child_study in child_studies:
+            self._add_dbgap_project_for_user(
+                child_study + child_suffix,
+                privileges,
+                username,
+                sess,
+                user_projects,
+                dbgap_config,
+            )
+
     def _add_dbgap_project_for_user(
         self, dbgap_project, privileges, username, sess, user_projects, dbgap_config
     ):
@@ -705,7 +764,7 @@ def sync_two_phsids_dict(
         phsids2_overrides_phsids1=True,
     ):
         """
-        Merge pshid1 into phsids2. If `phsids2_overrides_phsids1`, values in
+        Merge phsids1 into phsids2. If `phsids2_overrides_phsids1`, values in
         phsids1 are overriden by values in phsids2. phsids2 ends up containing
         the merged dict (see explanation below).
         `source1` and `source2`: for logging.
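Based on the docstring alone, the merge direction can be pictured with a toy sketch (invented data; this is not the actual implementation, which also handles logging and source tracking):

```python
# Toy sketch of sync_two_phsids_dict's merge direction (invented data).
phsids1 = {"userA": {"phs000123": {"read-storage"}}}
phsids2 = {"userA": {"phs000123": {"read"}}, "userB": {"phs000456": {"read"}}}

# With phsids2_overrides_phsids1=True, phsids2 wins on conflicts and ends up
# holding the merged result:
for user, projects in phsids1.items():
    for phs, privileges in projects.items():
        phsids2.setdefault(user, {}).setdefault(phs, privileges)
print(phsids2["userA"]["phs000123"])  # {'read'} - the phsids2 value is kept
```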
@@ -1402,7 +1461,7 @@ def _process_user_projects(
             phsid = project.split(".")
             dbgap_project = phsid[0]
             privileges = user_projects[username][project]
-            if len(phsid) > 1 and self.parse_consent_code:
+            if len(phsid) > 1 and self._get_parse_consent_code(dbgap_config):
                 consent_code = phsid[-1]
 
                 # c999 indicates full access to all consents and access
@@ -1548,10 +1607,11 @@ def _sync(self, sess):
         # Note: if there are multiple dbgap sftp servers configured
         # this parameter is always from the config for the first dbgap sftp server
         # not any additional ones
-        if self.parse_consent_code:
-            self._grant_all_consents_to_c999_users(
-                user_projects, user_yaml.project_to_resource
-            )
+        for dbgap_config in self.dbGaP:
+            if self._get_parse_consent_code(dbgap_config):
+                self._grant_all_consents_to_c999_users(
+                    user_projects, user_yaml.project_to_resource
+                )
 
         google_update_ex = None
@@ -1636,6 +1696,14 @@ def _grant_all_consents_to_c999_users(
             consent_mapping.setdefault(accession_number["phsid"], set()).add(
                 ".".join([accession_number["phsid"], accession_number["consent"]])
             )
+            children = self._get_children(accession_number["phsid"])
+            if children:
+                for child_phs in children:
+                    consent_mapping.setdefault(child_phs, set()).add(
+                        ".".join(
+                            [child_phs, accession_number["consent"]]
+                        )  # Assign parent consent to child study
+                    )
 
         self.logger.debug(f"consent mapping: {consent_mapping}")
@@ -1800,6 +1868,36 @@ def _update_arborist(self, session, user_yaml):
 
         return True
 
+    def _revoke_all_policies_preserve_mfa(self, user):
+        """
+        If MFA is enabled for the user's idp, check if they have the /multifactor_auth resource and restore the
+        mfa_policy after revoking all policies.
+        """
+        username = user.username
+        idp = user.identity_provider.name if user.identity_provider else None
+
+        is_mfa_enabled = "multifactor_auth_claim_info" in config["OPENID_CONNECT"].get(
+            idp, {}
+        )
+        if not is_mfa_enabled:
+            # TODO This should be a diff, not a revocation of all policies.
+            self.arborist_client.revoke_all_policies_for_user(username)
+            return
+
+        policies = []
+        try:
+            policies = self.arborist_client.get_user(username)["policies"]
+        except Exception as e:
+            self.logger.error(
+                f"Could not retrieve user's policies, revoking all policies anyway. {e}"
+            )
+        finally:
+            # TODO This should be a diff, not a revocation of all policies.
+            self.arborist_client.revoke_all_policies_for_user(username)
+
+        if "mfa_policy" in policies:
+            status_code = self.arborist_client.grant_user_policy(username, "mfa_policy")
+
     def _update_authz_in_arborist(
         self,
         session,
@@ -1905,8 +2003,7 @@ def _update_authz_in_arborist(
                 self.arborist_client.create_user_if_not_exist(username)
 
             if not single_user_sync:
-                # TODO make this smarter - it should do a diff, not revoke all and add
-                self.arborist_client.revoke_all_policies_for_user(username)
+                self._revoke_all_policies_preserve_mfa(user)
 
             # as of 2/11/2022, for single_user_sync, as RAS visa parsing has
             # previously mapped each project to the same set of privileges
@@ -2299,63 +2396,6 @@ def _pick_sync_type(self, visa):
 
         return sync_client
 
-    def parse_user_visas(self, db_session):
-        """
-        Retrieve all visas from fence db and parse to python dict
-
-        Return:
-            Tuple[[dict, dict]]:
-                (user_project, user_info) where user_project is a mapping from
-                usernames to project permissions and user_info is a mapping
-                from usernames to user details, such as email
-
-        Example:
-
-            (
-                {
-                    username: {
-                        'project1': {'read-storage','write-storage'},
-                        'project2': {'read-storage'},
-                    }
-                },
-                {
-                    username: {
-                        'email': 'email@mail.com',
-                        'display_name': 'display name',
-                        'phone_number': '123-456-789',
-                        'tags': {'dbgap_role': 'PI'}
-                    }
-                },
-            )
-
-        """
-        user_projects = dict()
-        user_info = dict()
-
-        users = db_session.query(User).all()
-
-        for user in users:
-            projects = {}
-            info = {}
-            if user.ga4gh_visas_v1:
-                for visa in user.ga4gh_visas_v1:
-                    project = {}
-                    visa_type = self._pick_sync_type(visa)
-                    encoded_visa = visa.ga4gh_visa
-                    project, info = visa_type._parse_single_visa(
-                        user,
-                        encoded_visa,
-                        visa.expires,
-                        self.parse_consent_code,
-                    )
-                    projects = {**projects, **project}
-            if projects:
-                self.auth_source[user.username].add("visas")
-                user_projects[user.username] = projects
-                user_info[user.username] = info
-
-        return (user_projects, user_info)
-
     def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
         """
         Sync a single user's visas during login or DRS/data access
@@ -2377,6 +2417,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
         """
         self.ras_sync_client = RASVisa(logger=self.logger)
         dbgap_config = self.dbGaP[0]
+        parse_consent_code = self._get_parse_consent_code(dbgap_config)
         enable_common_exchange_area_access = dbgap_config.get(
             "enable_common_exchange_area_access", False
         )
@@ -2409,7 +2450,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
                     user,
                     encoded_visa,
                     visa.expires,
-                    self.parse_consent_code,
+                    parse_consent_code,
                 )
             except Exception:
                 self.logger.warning(
@@ -2425,7 +2466,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
 
         user_projects = self.parse_projects(user_projects)
 
-        if self.parse_consent_code and enable_common_exchange_area_access:
+        if parse_consent_code and enable_common_exchange_area_access:
             self.logger.info(
                 f"using study to common exchange area mapping: {study_common_exchange_areas}"
             )
@@ -2438,7 +2479,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
                 sess,
             )
 
-        if self.parse_consent_code:
+        if parse_consent_code:
             self._grant_all_consents_to_c999_users(
                 user_projects, user_yaml.project_to_resource
             )
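Condensed, the preserve-MFA revocation added above follows this flow (a sketch reusing the gen3authz-style client methods already used in the file; error handling and logging trimmed):

```python
# Sketch of the preserve-MFA revocation flow, condensed from
# _revoke_all_policies_preserve_mfa above. arborist_client stands in for
# the gen3authz client the syncer holds.
def revoke_all_preserve_mfa(arborist_client, username, mfa_enabled):
    if not mfa_enabled:
        arborist_client.revoke_all_policies_for_user(username)
        return
    try:
        policies = arborist_client.get_user(username)["policies"]
    except Exception:
        policies = []  # revoke everything anyway; the MFA policy is lost
    arborist_client.revoke_all_policies_for_user(username)
    if "mfa_policy" in policies:
        arborist_client.grant_user_policy(username, "mfa_policy")
```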
diff --git a/poetry.lock b/poetry.lock
index 4782b56cd..3470631ca 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -14,14 +14,14 @@ files = [
 
 [[package]]
 name = "alembic"
-version = "1.11.1"
+version = "1.12.0"
 description = "A database
migration tool for SQLAlchemy." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.11.1-py3-none-any.whl", hash = "sha256:dc871798a601fab38332e38d6ddb38d5e734f60034baeb8e2db5b642fccd8ab8"}, - {file = "alembic-1.11.1.tar.gz", hash = "sha256:6a810a6b012c88b33458fceb869aef09ac75d6ace5291915ba7fae44de372c01"}, + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, ] [package.dependencies] @@ -49,25 +49,25 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "3.7.1" +version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] name = "atomicwrites" @@ -161,20 +161,20 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.28.0" +version = "1.29.3" description = "Microsoft Azure Core Library for Python" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.28.0.zip", hash = "sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd"}, - {file = "azure_core-1.28.0-py3-none-any.whl", hash = "sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9"}, + {file = "azure-core-1.29.3.tar.gz", hash = "sha256:c92700af982e71c8c73de9f4c20da8b3f03ce2c22d13066e4d416b4629c87903"}, + {file = "azure_core-1.29.3-py3-none-any.whl", hash = "sha256:f8b2910f92b66293d93bd00564924ad20ad48f4a1e150577cf18d1e7d4f9263c"}, ] [package.dependencies] requests = ">=2.18.4" six = ">=1.11.0" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] @@ -575,14 +575,14 @@ files = [ [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = 
"sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -706,35 +706,35 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "41.0.2" +version = "41.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, - {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, - {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, - {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, + {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, + {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, + {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, ] [package.dependencies] @@ -764,23 +764,19 @@ files = [ [[package]] name = "dnspython" -version = "2.4.0" +version = "2.4.2" description = "DNS toolkit" category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "dnspython-2.4.0-py3-none-any.whl", hash = "sha256:46b4052a55b56beea3a3bdd7b30295c292bd6827dd442348bc116f2d35b17f0a"}, - {file = "dnspython-2.4.0.tar.gz", hash = "sha256:758e691dbb454d5ccf4e1b154a19e52847f79e21a42fef17b969144af29a4e6c"}, + {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, + {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, ] -[package.dependencies] -httpcore = {version = ">=0.17.3", markers = "python_version >= \"3.8\""} -sniffio = ">=1.1,<2.0" - [package.extras] dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.24.1)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] doq = ["aioquic (>=0.9.20)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.23)"] @@ -868,14 +864,14 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -883,14 +879,14 @@ test = ["pytest (>=6)"] [[package]] name = "flask" -version = "2.3.2" +version = "2.3.3" description = "A simple framework for building complex web applications." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"}, - {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"}, + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, ] [package.dependencies] @@ -899,7 +895,7 @@ click = ">=8.1.3" importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=2.3.3" +Werkzeug = ">=2.3.7" [package.extras] async = ["asgiref (>=3.2)"] @@ -1270,14 +1266,14 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.59.1" +version = "1.60.0" description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, - {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, + {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, + {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, ] [package.dependencies] @@ -1535,14 +1531,14 @@ files = [ [[package]] name = "jsonpickle" -version = "3.0.1" +version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.1-py2.py3-none-any.whl", hash = "sha256:130d8b293ea0add3845de311aaba55e6d706d0bb17bc123bd2c8baf8a39ac77c"}, - {file = "jsonpickle-3.0.1.tar.gz", hash = "sha256:032538804795e73b94ead410800ac387fdb6de98f8882ac957fcd247e3a85200"}, + {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, + {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, ] [package.extras] @@ -1671,14 +1667,14 @@ test = ["unittest2 (>=1.1.0)"] [[package]] name = "more-itertools" -version = "10.0.0" +version = "10.1.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.0.0.tar.gz", hash = "sha256:cd65437d7c4b615ab81c0640c0480bc29a550ea032891977681efd28344d51e1"}, - {file = "more_itertools-10.0.0-py3-none-any.whl", hash = "sha256:928d514ffd22b5b0a8fce326d57f423a55d2ff783b093bab217eda71e732330f"}, + {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, + {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, ] [[package]] @@ -1772,14 +1768,14 @@ files = [ [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - 
{file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1835,25 +1831,23 @@ files = [ [[package]] name = "psycopg2" -version = "2.9.6" +version = "2.9.7" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, - {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, - {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, - {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258"}, - {file = "psycopg2-2.9.6-cp38-cp38-win32.whl", hash = "sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214"}, - {file = "psycopg2-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394"}, - {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = "sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, - {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, - {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, + {file = "psycopg2-2.9.7-cp310-cp310-win32.whl", hash = "sha256:1a6a2d609bce44f78af4556bea0c62a5e7f05c23e5ea9c599e07678995609084"}, + {file = "psycopg2-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:b22ed9c66da2589a664e0f1ca2465c29b75aaab36fa209d4fb916025fb9119e5"}, + {file = "psycopg2-2.9.7-cp311-cp311-win32.whl", hash = "sha256:44d93a0109dfdf22fe399b419bcd7fa589d86895d3931b01fb321d74dadc68f1"}, + {file = "psycopg2-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:91e81a8333a0037babfc9fe6d11e997a9d4dac0f38c43074886b0d9dead94fe9"}, + {file = "psycopg2-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:d1210fcf99aae6f728812d1d2240afc1dc44b9e6cba526a06fb8134f969957c2"}, + {file = "psycopg2-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e9b04cbef584310a1ac0f0d55bb623ca3244c87c51187645432e342de9ae81a8"}, + {file = "psycopg2-2.9.7-cp38-cp38-win32.whl", hash = "sha256:d5c5297e2fbc8068d4255f1e606bfc9291f06f91ec31b2a0d4c536210ac5c0a2"}, + {file = "psycopg2-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:8275abf628c6dc7ec834ea63f6f3846bf33518907a2b9b693d41fd063767a866"}, + {file = 
"psycopg2-2.9.7-cp39-cp39-win32.whl", hash = "sha256:c7949770cafbd2f12cecc97dea410c514368908a103acf519f2a346134caa4d5"}, + {file = "psycopg2-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:b6bd7d9d3a7a63faae6edf365f0ed0e9b0a1aaf1da3ca146e6b043fb3eb5d723"}, + {file = "psycopg2-2.9.7.tar.gz", hash = "sha256:f00cc35bd7119f1fed17b85bd1007855194dde2cbd8de01ab8ebb17487440ad8"}, ] [[package]] @@ -2017,14 +2011,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.0" +version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, - {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, ] [package.extras] @@ -2225,21 +2219,21 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.23.1" +version = "0.23.3" description = "A utility library for mocking out the `requests` Python library." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "responses-0.23.1-py3-none-any.whl", hash = "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd"}, - {file = "responses-0.23.1.tar.gz", hash = "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f"}, + {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, + {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, ] [package.dependencies] pyyaml = "*" -requests = ">=2.22.0,<3.0" +requests = ">=2.30.0,<3.0" types-PyYAML = "*" -urllib3 = ">=1.25.10" +urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] @@ -2292,19 +2286,19 @@ botocore = ">=1.12.36,<2.0.0" [[package]] name = "setuptools" -version = "68.0.0" +version = "68.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock 
(>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2501,31 +2495,31 @@ sqlalchemy = ">=1.3.3" [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.2" description = "WebSocket client for Python with low level API options" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.2.tar.gz", hash = "sha256:53e95c826bf800c4c465f50093a8c4ff091c7327023b10bfaff40cf1ef170eaa"}, + {file = "websocket_client-1.6.2-py3-none-any.whl", hash = "sha256:ce54f419dfae71f4bdba69ebe65bf7f0a93fe71bc009ad3a010aacc3eebad537"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "werkzeug" -version = "2.3.6" +version = "2.3.7" description = "The comprehensive WSGI web application library." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"}, - {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"}, + {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, + {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, ] [package.dependencies] @@ -2668,4 +2662,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "2000f442c0b6cfa7c3d22584b27cbc94f09094e8302df46fe75280a16293d6b0" +content-hash = "5718ddd84fe1d37165c2533e23b1845a65b80acffdf67aaf7deb1eb3713ee48a" diff --git a/pyproject.toml b/pyproject.toml index a8ac4dbc7..cb5facdc5 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "fence" -version = "9.0.0" +version = "9.2.0" description = "Gen3 AuthN/AuthZ OIDC Service" authors = ["CTDS UChicago "] license = "Apache-2.0" @@ -22,7 +22,7 @@ cached_property = "^1.5.1" cdiserrors = "<2.0.0" cdislogging = "^1.0.0" cdispyutils = "^2.0.1" -cryptography = ">=3.2" +cryptography = ">=41.0.2" flask = ">=2.2.3" flask-cors = ">=3.0.3" flask-restful = ">=0.3.8" diff --git a/tests/conftest.py b/tests/conftest.py index 902db3f95..0ecbd0da2 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -378,7 +378,7 @@ def return_false(): @pytest.fixture(scope="function") def mock_arborist_requests(request): """ - This fixture returns a function which you call to mock out arborist endopints. + This fixture returns a function which you call to mock out arborist endpoints. 
     Give it an argument in this format:
     {
         "arborist/health": {
diff --git a/tests/dbgap_sync/conftest.py b/tests/dbgap_sync/conftest.py
index e80564fa7..48907ba5b 100644
--- a/tests/dbgap_sync/conftest.py
+++ b/tests/dbgap_sync/conftest.py
@@ -99,51 +99,6 @@ def syncer(db_session, request, rsa_private_key, kid):
     storage_credentials = {str(backend_name): {"backend": backend}}
     provider = [{"name": backend_name, "backend": backend}]
 
-    users = [
-        {
-            "username": "TESTUSERB",
-            "is_admin": True,
-            "email": "userA@gmail.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "USER_1",
-            "is_admin": True,
-            "email": "user1@gmail.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "test_user1@gmail.com",
-            "is_admin": False,
-            "email": "test_user1@gmail.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "deleted_user@gmail.com",
-            "is_admin": True,
-            "email": "deleted_user@gmail.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "TESTUSERD",
-            "is_admin": True,
-            "email": "userD@gmail.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "expired_visa_user",
-            "is_admin": False,
-            "email": "expired@expired.com",
-            "idp_name": "ras",
-        },
-        {
-            "username": "invalid_visa_user",
-            "is_admin": False,
-            "email": "invalid@invalid.com",
-            "idp_name": "ras",
-        },
-    ]
-
     projects = [
         {
             "auth_id": "TCGA-PCAWG",
@@ -221,6 +176,8 @@ def mocked_get(path, **kwargs):
 
     syncer_obj._create_arborist_resources = MagicMock()
 
+    syncer_obj.arborist_client.revoke_all_policies_for_user = MagicMock()
+
     for element in provider:
         udm.create_provider(db_session, element["name"], backend=element["backend"])
diff --git a/tests/dbgap_sync/data/csv/authentication_file_phs001179.txt b/tests/dbgap_sync/data/csv/authentication_file_phs001179.txt
new file mode 100644
index 000000000..941411a75
--- /dev/null
+++ b/tests/dbgap_sync/data/csv/authentication_file_phs001179.txt
@@ -0,0 +1,3 @@
+user name, login, authority, role, email, phone, status, phsid, permission set, created, updated, expires, downloader for
+USER KYLE,TESTPARENTAUTHZ,eRA,PI,kyle@com,"123-456-789",active,phs001179.v9.p8.c1,"General Research Use",2013-03-19 12:32:12.600,2015-05-14 16:01:16.923,2016-05-14 00:00:00.000,
+USER KYLE2,TESTPARENTAUTHZ999,eRA,PI,kyle@com,"123-456-789",active,phs001179.v9.p8.c999,"General Research Use",2013-03-19 12:32:12.600,2015-05-14 16:01:16.923,2016-05-14 00:00:00.000,
diff --git a/tests/dbgap_sync/test_user_sync.py b/tests/dbgap_sync/test_user_sync.py
index af885b582..579f1b416 100644
--- a/tests/dbgap_sync/test_user_sync.py
+++ b/tests/dbgap_sync/test_user_sync.py
@@ -1,30 +1,23 @@
 import os
 import pytest
-import yaml
 import collections
 import asyncio
-import flask
 from unittest.mock import MagicMock, patch
 
 import mock
+from userdatamodel.user import IdentityProvider
 
 from fence import models
-from fence.resources.google import access_utils
-from fence.resources.google.access_utils import (
-    GoogleUpdateException,
-    bulk_update_google_groups,
-)
-from fence.sync.sync_users import _format_policy_id
+from fence.resources.google.access_utils import GoogleUpdateException
 from fence.config import config
 from fence.job.visa_update_cronjob import Visa_Token_Update
-from tests.dbgap_sync.conftest import LOCAL_YAML_DIR
-from tests.utils import TEST_RAS_USERNAME, TEST_RAS_SUB
 from tests.dbgap_sync.conftest import (
     get_test_encoded_decoded_visa_and_exp,
     fake_ras_login,
 )
 from tests.conftest import get_subjects_to_passports
+from fence.models import User
 
 
 def equal_project_access(d1, d2):
@@ -82,29 +75,42 @@ def test_sync_incorrect_user_yaml_file(syncer, monkeypatch, db_session):
 @pytest.mark.parametrize("allow_non_dbgap_whitelist", [False, True])
 @pytest.mark.parametrize("syncer", ["google", "cleversafe"], indirect=True)
 @pytest.mark.parametrize("parse_consent_code_config", [False, True])
+@pytest.mark.parametrize("parent_to_child_studies_mapping", [False, True])
 def test_sync(
     syncer,
     db_session,
     allow_non_dbgap_whitelist,
     storage_client,
     parse_consent_code_config,
+    parent_to_child_studies_mapping,
     monkeypatch,
 ):
     # patch the sync to use the parameterized config value
-    monkeypatch.setitem(
-        syncer.dbGaP[0], "parse_consent_code", parse_consent_code_config
-    )
-    monkeypatch.setattr(syncer, "parse_consent_code", parse_consent_code_config)
+    for dbgap_config in syncer.dbGaP:
+        monkeypatch.setitem(
+            dbgap_config, "parse_consent_code", parse_consent_code_config
+        )
 
     monkeypatch.setitem(
         syncer.dbGaP[2], "allow_non_dbGaP_whitelist", allow_non_dbgap_whitelist
     )
 
+    if parent_to_child_studies_mapping:
+        mapping = {
+            "phs001179": ["phs000179", "phs000178"],
+        }
+        monkeypatch.setitem(
+            syncer.dbGaP[0],
+            "parent_to_child_studies_mapping",
+            mapping,
+        )
+        monkeypatch.setattr(syncer, "parent_to_child_studies_mapping", mapping)
+
     syncer.sync()
 
     users = db_session.query(models.User).all()
-    # 5 from user.yaml, 4 from fake dbgap SFTP
-    assert len(users) == 9
+    # 5 from user.yaml, 6 from fake dbgap SFTP
+    assert len(users) == 11
 
     if parse_consent_code_config:
         if allow_non_dbgap_whitelist:
@@ -167,6 +173,33 @@ def test_sync(
                 "phs000178.c1": ["read", "read-storage"],
             },
         )
+        if parent_to_child_studies_mapping:
+            user = models.query_for_user(
+                session=db_session, username="TESTPARENTAUTHZ"
+            )
+            assert equal_project_access(
+                user.project_access,
+                {
+                    "phs000178.c1": ["read", "read-storage"],
+                    "phs000179.c1": ["read", "read-storage"],
+                    "phs001179.c1": ["read", "read-storage"],
+                },
+            )
+            user = models.query_for_user(
+                session=db_session, username="TESTPARENTAUTHZ999"
+            )
+            assert equal_project_access(
+                user.project_access,
+                {
+                    "phs000178.c1": ["read", "read-storage"],
+                    "phs000178.c2": ["read", "read-storage"],
+                    "phs000178.c999": ["read", "read-storage"],
+                    "phs000179.c1": ["read", "read-storage"],
+                    "phs000179.c999": ["read", "read-storage"],
+                    "phs001179.c999": ["read", "read-storage"],
+                    "phs001179.c1": ["read", "read-storage"],
+                },
+            )
     else:
         if allow_non_dbgap_whitelist:
             user = models.query_for_user(session=db_session, username="TESTUSERD")
@@ -230,6 +263,29 @@ def test_sync(
                 "phs000179": ["read", "read-storage"],
             },
         )
+        if parent_to_child_studies_mapping:
+            user = models.query_for_user(
+                session=db_session, username="TESTPARENTAUTHZ"
+            )
+            assert equal_project_access(
+                user.project_access,
+                {
+                    "phs000178": ["read", "read-storage"],
+                    "phs000179": ["read", "read-storage"],
+                    "phs001179": ["read", "read-storage"],
+                },
+            )
+            user = models.query_for_user(
+                session=db_session, username="TESTPARENTAUTHZ999"
+            )
+            assert equal_project_access(
+                user.project_access,
+                {
+                    "phs000178": ["read", "read-storage"],
+                    "phs000179": ["read", "read-storage"],
+                    "phs001179": ["read", "read-storage"],
+                },
+            )
 
     user = models.query_for_user(session=db_session, username="TESTUSERD")
     assert user.display_name == "USER D"
@@ -272,10 +328,10 @@ def test_dbgap_consent_codes(
         "enable_common_exchange_area_access",
         enable_common_exchange_area,
     )
-    monkeypatch.setattr(syncer, "parse_consent_code", parse_consent_code_config)
-    monkeypatch.setitem(
-        syncer.dbGaP[0], "parse_consent_code", parse_consent_code_config
-    )
+    for dbgap_config in syncer.dbGaP:
+        monkeypatch.setitem(
+            dbgap_config, "parse_consent_code", parse_consent_code_config
+        )
 
     monkeypatch.setattr(syncer, "project_mapping", {})
@@ -908,8 +964,8 @@ def test_user_sync_with_visa_sync_job(
 
     users_after = db_session.query(models.User).all()
 
-    # 5 from user.yaml, 4 from fake dbgap SFTP
-    assert len(users_after) == 9
+    # 5 from user.yaml, 6 from fake dbgap SFTP
+    assert len(users_after) == 11
 
     for user in users_after:
         if user.username in setup_info["usernames_to_ras_subjects"]:
@@ -975,3 +1031,108 @@ def test_user_sync_with_visa_sync_job(
                 setup_info["usernames_to_ras_subjects"][valid_user.username]
             ]
         )
+
+
+@pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
+def test_revoke_all_policies_preserve_mfa(monkeypatch, db_session, syncer):
+    """
+    Test that the mfa_policy is re-granted to the user after revoking all their policies.
+    """
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {
+            "mock_idp": {
+                "multifactor_auth_claim_info": {"claim": "acr", "values": ["mfa"]}
+            }
+        },
+    )
+    user = User(
+        username="mockuser", identity_provider=IdentityProvider(name="mock_idp")
+    )
+    syncer.arborist_client.get_user.return_value = {"policies": ["mfa_policy"]}
+    syncer._revoke_all_policies_preserve_mfa(user)
+    syncer.arborist_client.revoke_all_policies_for_user.assert_called_with(
+        user.username
+    )
+    syncer.arborist_client.grant_user_policy.assert_called_with(
+        user.username, "mfa_policy"
+    )
+
+
+@pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
+def test_revoke_all_policies_preserve_mfa_no_mfa(monkeypatch, db_session, syncer):
+    """
+    Test to ensure the mfa_policy preservation does not occur if the user does not have the mfa resource granted.
+    """
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {
+            "mock_idp": {
+                "multifactor_auth_claim_info": {"claim": "acr", "values": ["mfa"]}
+            }
+        },
+    )
+    user = User(
+        username="mockuser", identity_provider=IdentityProvider(name="mock_idp")
+    )
+    syncer.arborist_client.list_resources_for_user.return_value = [
+        "/programs/phs0001111"
+    ]
+    syncer._revoke_all_policies_preserve_mfa(user)
+    syncer.arborist_client.revoke_all_policies_for_user.assert_called_with(
+        user.username
+    )
+    syncer.arborist_client.grant_user_policy.assert_not_called()
+
+
+@pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
+def test_revoke_all_policies_preserve_mfa_no_idp(monkeypatch, db_session, syncer):
+    """
+    Test that policies are still revoked when no IdP is associated with the user.
+    """
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {
+            "mock_idp": {
+                "multifactor_auth_claim_info": {"claim": "acr", "values": ["mfa"]}
+            }
+        },
+    )
+    user = User(username="mockuser")
+    syncer._revoke_all_policies_preserve_mfa(user)
+    syncer.arborist_client.revoke_all_policies_for_user.assert_called_with(
+        user.username
+    )
+    syncer.arborist_client.grant_user_policy.assert_not_called()
+    syncer.arborist_client.list_resources_for_user.assert_not_called()
+
+
+@pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
+def test_revoke_all_policies_preserve_mfa_ensure_revoke_on_error(
+    monkeypatch, db_session, syncer
+):
+    """
+    Test that arborist_client.revoke_all_policies_for_user is still called when an error occurs.
+    """
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {
+            "mock_idp": {
+                "multifactor_auth_claim_info": {"claim": "acr", "values": ["mfa"]}
+            }
+        },
+    )
+    user = User(
+        username="mockuser", identity_provider=IdentityProvider(name="mock_idp")
+    )
+    syncer.arborist_client.list_resources_for_user.side_effect = Exception(
+        "Unknown error"
+    )
+    syncer._revoke_all_policies_preserve_mfa(user)
+    syncer.arborist_client.revoke_all_policies_for_user.assert_called_with(
+        user.username
+    )
diff --git a/tests/link/conftest.py b/tests/link/conftest.py
index 432dad6e4..c9292a7d2 100644
--- a/tests/link/conftest.py
+++ b/tests/link/conftest.py
@@ -21,7 +21,7 @@ def add_new_g_acnt_mock(db_session):
 @pytest.fixture(scope="function")
 def google_auth_get_user_info_mock():
     mock = MagicMock()
-    patcher = patch("flask.current_app.google_client.get_user_id", mock)
+    patcher = patch("flask.current_app.google_client.get_auth_info", mock)
     patcher.start()
 
     yield mock
diff --git a/tests/login/test_base.py b/tests/login/test_base.py
new file mode 100644
index 000000000..a9bfff7ec
--- /dev/null
+++ b/tests/login/test_base.py
@@ -0,0 +1,56 @@
+from fence.blueprints.login import DefaultOAuth2Callback
+from fence.config import config
+from unittest.mock import MagicMock, patch
+
+
+@patch("fence.blueprints.login.base.prepare_login_log")
+def test_post_login_set_mfa(app, monkeypatch):
+    """
+    Verifies that Arborist is called with the mfa_policy if a given token contains the claims found in the
+    configured multifactor_auth_claim_info.
+    """
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {
+            "mock_idp": {
+                "multifactor_auth_claim_info": {"claim": "acr", "values": ["mfa"]}
+            }
+        },
+    )
+    callback = DefaultOAuth2Callback(
+        "mock_idp", MagicMock(), username_field="username", app=app
+    )
+
+    app.arborist = MagicMock()
+    token_result = {"username": "lisasimpson", "mfa": True}
+    callback.post_login(token_result=token_result)
+    app.arborist.grant_user_policy.assert_called_with(
+        username=token_result["username"], policy_id="mfa_policy"
+    )
+
+    token_result = {"username": "homersimpson", "mfa": False}
+    callback.post_login(token_result=token_result)
+    app.arborist.revoke_user_policy.assert_called_with(
+        username=token_result["username"], policy_id="mfa_policy"
+    )
+
+
+@patch("fence.blueprints.login.base.prepare_login_log")
+def test_post_login_no_mfa_enabled(app, monkeypatch):
+    """
+    Verifies that Arborist is not called when there is no multifactor_auth_claim_info defined for the given IdP.
+    """
+    app.arborist = MagicMock()
+    monkeypatch.setitem(
+        config,
+        "OPENID_CONNECT",
+        {"mock_idp": {}},
+    )
+    with app.app_context():
+        callback = DefaultOAuth2Callback(
+            "mock_idp", MagicMock(), username_field="username"
+        )
+        token_result = {"username": "lisasimpson"}
+        callback.post_login(token_result=token_result)
+        app.arborist.revoke_user_policy.assert_not_called()
diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py
new file mode 100644
index 000000000..40ae2349a
--- /dev/null
+++ b/tests/login/test_idp_oauth2.py
@@ -0,0 +1,80 @@
+import pytest
+from cdislogging import get_logger
+
+from fence import Oauth2ClientBase
+
+MOCK_SETTINGS_ACR = {
+    "client_id": "client",
+    "client_secret": "hunter2",
+    "redirect_url": "localhost",
+    "multifactor_auth_claim_info": {
+        "claim": "acr",
+        "values": ["mfa", "otp", "duo", "sms", "phonecall"],
+    },
+}
+MOCK_SETTINGS_AMR = {
+    "client_id": "client",
+    "client_secret": "hunter2",
+    "redirect_url": "localhost",
+    "multifactor_auth_claim_info": {
+        "claim": "amr",
+        "values": ["mfa", "otp", "duo", "sms", "phonecall"],
+    },
+}
+logger = get_logger(__name__, log_level="debug")
+
+
+@pytest.fixture()
+def oauth_client_acr():
+    return Oauth2ClientBase(settings=MOCK_SETTINGS_ACR, idp="mock", logger=logger)
+
+
+@pytest.fixture()
+def oauth_client_amr():
+    return Oauth2ClientBase(settings=MOCK_SETTINGS_AMR, idp="mock", logger=logger)
+
+
+def test_has_mfa_claim_acr(oauth_client_acr):
+    has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa"})
+    assert has_mfa
+
+
+def test_has_mfa_claim_multiple_acr(oauth_client_acr):
+    has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa otp duo"})
+    assert has_mfa
+
+
+def test_does_not_has_mfa_claim(oauth_client_acr):
+    has_mfa = oauth_client_acr.has_mfa_claim({"acr": "pwd"})
+    assert not has_mfa
+
+    has_mfa = oauth_client_acr.has_mfa_claim({"something": "mfa"})
+    assert not has_mfa
+
+
+def test_does_not_has_mfa_claim_multiple(oauth_client_acr):
+    has_mfa = oauth_client_acr.has_mfa_claim({"acr": "pwd trustme"})
+    assert not has_mfa
+
+
+def test_has_mfa_claim_amr(oauth_client_amr):
+    has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["mfa"]})
+    assert has_mfa
+
+
+def test_has_mfa_claim_multiple_amr(oauth_client_amr):
+    has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["mfa", "otp", "duo"]})
+    assert has_mfa
+
+
+def test_does_not_has_mfa_claim_amr(oauth_client_amr):
+    has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["pwd"]})
+    assert not has_mfa
+
+    has_mfa = oauth_client_amr.has_mfa_claim({"something": ["mfa"]})
+    assert not has_mfa
+
+
+def test_does_not_has_mfa_claim_multiple_amr(oauth_client_amr):
+    has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["pwd", "trustme"]})
+    assert not has_mfa
diff --git a/tests/login/test_microsoft_login.py b/tests/login/test_microsoft_login.py
index 23343a7d2..972b8a07f 100755
--- a/tests/login/test_microsoft_login.py
+++ b/tests/login/test_microsoft_login.py
@@ -13,7 +13,7 @@ def test_get_auth_url(microsoft_oauth2_client):
     assert url  # nosec
 
 
-def test_get_user_id(microsoft_oauth2_client):
+def test_get_auth_info(microsoft_oauth2_client):
     """
     Test getting a user id and check for email claim
     """
@@ -23,12 +23,12 @@ def test_get_user_id(microsoft_oauth2_client):
         "fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_claims_identity",
         return_value=return_value,
     ):
-        user_id = microsoft_oauth2_client.get_user_id(code="123")
+        user_id = microsoft_oauth2_client.get_auth_info(code="123")
 
     for key, value in expected_value.items():
         assert return_value[key] == value
 
 
-def test_get_user_id_missing_claim(microsoft_oauth2_client):
+def test_get_auth_info_missing_claim(microsoft_oauth2_client):
     """
     Test getting a user id but missing the email claim
     """
@@ -38,15 +38,15 @@ def test_get_user_id_missing_claim(microsoft_oauth2_client):
         "fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_claims_identity",
         return_value=return_value,
     ):
-        user_id = microsoft_oauth2_client.get_user_id(code="123")
+        user_id = microsoft_oauth2_client.get_auth_info(code="123")
 
     assert user_id == expected_value  # nosec
 
 
-def test_get_user_id_invalid_code(microsoft_oauth2_client):
+def test_get_auth_info_invalid_code(microsoft_oauth2_client):
     """
     Test getting a user id but with an invalid code
     """
     expected_value = "Can't get your Microsoft email:"
-    user_id = microsoft_oauth2_client.get_user_id(code="123")
+    user_id = microsoft_oauth2_client.get_auth_info(code="123")
 
     assert expected_value in user_id["error"]  # nosec
diff --git a/tests/test_app_config.py b/tests/test_app_config.py
index 283555f90..3c3af9fd0 100755
--- a/tests/test_app_config.py
+++ b/tests/test_app_config.py
@@ -9,6 +9,7 @@ from azure.core.exceptions import ResourceNotFoundError
 
 import fence
 from fence import app_init, _check_azure_storage
+from fence.config import FenceConfig
 from tests import test_settings
 from tests.conftest import FakeBlobServiceClient
 
@@ -58,7 +59,7 @@ def test_check_azure_storage_resource_not_found_error():
 
 
 def test_app_config():
     """
-    Test app_init call using the 'test-fence-config.yaml'
+    Test app_init call using the "test-fence-config.yaml"
 
     This includes the check to verify underlying storage
     """
@@ -118,3 +119,22 @@ def test_app_config():
 
     for patcher in patchers:
         patcher.stop()
+
+
+def test_app_config_parent_child_study_mapping(monkeypatch):
+    invalid_dbgap_configs = [
+        {
+            "parent_to_child_studies_mapping": {
+                "phs001194": ["phs000571", "phs001843"],
+                "phs001193": ["phs000572", "phs001844"],
+            }
+        },
+        {
+            "parent_to_child_studies_mapping": {
+                "phs001194": ["phs0015623"],
+                "phs001192": ["phs0001", "phs002"],
+            }
+        },
+    ]
+    with pytest.raises(Exception):
+        FenceConfig._validate_parent_child_studies(invalid_dbgap_configs)
diff --git a/tests/test_audit_service.py b/tests/test_audit_service.py
index ed59b9528..151c40bca 100644
--- a/tests/test_audit_service.py
+++ b/tests/test_audit_service.py
@@ -400,27 +400,27 @@ def test_login_log_login_endpoint(
     callback_endpoint = "login"
     idp_name = idp
     headers = {}
-    get_user_id_value = {}
+    get_auth_info_value = {}
     jwt_string = jwt.encode(
         {"iat": int(time.time())}, key=rsa_private_key, algorithm="RS256"
     )
     if idp == "synapse":
-        get_user_id_value = {
+        get_auth_info_value = {
            "fence_username": username,
            "sub": username,
            "given_name": username,
            "family_name": username,
        }
     elif idp == "orcid":
-        get_user_id_value = {"orcid": username}
+        get_auth_info_value = {"orcid": username}
     elif idp == "cilogon":
-        get_user_id_value = {"sub": username}
+        get_auth_info_value = {"sub": username}
     elif idp == "shib":
         headers["persistent_id"] = username
         idp_name = "itrust"
     elif idp == "okta":
-        get_user_id_value = {"okta": username}
+        get_auth_info_value = {"okta": username}
     elif idp == "fence":
         mocked_fetch_access_token = MagicMock(return_value={"id_token": jwt_string})
         patch(
@@ -434,7 +434,7 @@ def test_login_log_login_endpoint(
             f"fence.blueprints.login.fence_login.validate_jwt", mocked_validate_jwt
         ).start()
     elif idp == "ras":
-        get_user_id_value = {"username": username}
+        get_auth_info_value = {"username": username}
         callback_endpoint = "callback"
         # these should be populated by a /login/ call that we're skipping:
         flask.g.userinfo = {"sub": "testSub123"}
@@ -444,20 +444,20 @@ def test_login_log_login_endpoint(
         flask.g.tokens = {
             "refresh_token": jwt_string,
             "id_token": jwt_string,
         }
         flask.g.encoded_visas = ""
     elif idp == "generic1":
-        get_user_id_value = {"generic1_username": username}
+        get_auth_info_value = {"generic1_username": username}
     elif idp == "generic2":
-        get_user_id_value = {"sub": username}
+        get_auth_info_value = {"sub": username}
 
     if idp in ["google", "microsoft", "okta", "synapse", "cognito"]:
-        get_user_id_value["email"] = username
+        get_auth_info_value["email"] = username
 
-    get_user_id_patch = None
-    if get_user_id_value:
-        mocked_get_user_id = MagicMock(return_value=get_user_id_value)
-        get_user_id_patch = patch(
-            f"flask.current_app.{idp}_client.get_user_id", mocked_get_user_id
+    get_auth_info_patch = None
+    if get_auth_info_value:
+        mocked_get_auth_info = MagicMock(return_value=get_auth_info_value)
+        get_auth_info_patch = patch(
+            f"flask.current_app.{idp}_client.get_auth_info", mocked_get_auth_info
         )
-        get_user_id_patch.start()
+        get_auth_info_patch.start()
 
     with audit_service_mocker as audit_service_requests:
         audit_service_requests.post.return_value = MockResponse(
@@ -483,8 +483,8 @@ def test_login_log_login_endpoint(
         },
     )
 
-    if get_user_id_patch:
-        get_user_id_patch.stop()
+    if get_auth_info_patch:
+        get_auth_info_patch.stop()
 
 
 ##########################
@@ -601,11 +601,11 @@ def test_login_log_push_to_sqs(
     mocked_sqs = mock_audit_service_sqs(app)
     username = "test@test"
 
-    mocked_get_user_id = MagicMock(return_value={"email": username})
-    get_user_id_patch = patch(
-        "flask.current_app.google_client.get_user_id", mocked_get_user_id
+    mocked_get_auth_info = MagicMock(return_value={"email": username})
+    get_auth_info_patch = patch(
+        "flask.current_app.google_client.get_auth_info", mocked_get_auth_info
     )
-    get_user_id_patch.start()
+    get_auth_info_patch.start()
 
     path = "/login/google/login"
     response = client.get(path)
@@ -613,4 +613,4 @@ def test_login_log_push_to_sqs(
     # not checking the parameters here because we can't json.dumps "sub: ANY"
     mocked_sqs.send_message.assert_called_once()
 
-    get_user_id_patch.stop()
+    get_auth_info_patch.stop()
diff --git a/travis/pg_hba.conf b/travis/pg_hba.conf
new file mode 100644
index 000000000..e080219fd
--- /dev/null
+++ b/travis/pg_hba.conf
@@ -0,0 +1,10 @@
+# This config file will be used for the Travis test run.
+#
+# The new PostgreSQL 13 changes some settings from what they originally were
+# in Travis, so we'll set them back. In particular we want to enable
+# passwordless authentication for connections to PostgreSQL.
+# Source: https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/travis/pg_hba.conf
+local   all   postgres   trust
+local   all   all        trust
+host    all   all        127.0.0.1/32   trust
+host    all   all        ::1/128        trust
diff --git a/travis/postgresql.conf b/travis/postgresql.conf
new file mode 100644
index 000000000..d3959e564
--- /dev/null
+++ b/travis/postgresql.conf
@@ -0,0 +1,32 @@
+# This config file will be used for PostgreSQL 13 because Travis doesn't
+# have configurations set up for it yet. The most important part will be the
+# ramfs storage location change. It also defaults to port 5433 so we need to
+# change that back, too.
+# Copied from https://github.com/NCI-GDC/psqlgraph/blob/94f315db2c039217752cba85d9c63988f2059317/travis/postgresql.conf
+data_directory = '/var/ramfs/postgresql/13/main'
+hba_file = '/etc/postgresql/13/main/pg_hba.conf'
+ident_file = '/etc/postgresql/13/main/pg_ident.conf'
+external_pid_file = '/var/run/postgresql/13-main.pid'
+port = 5432
+max_connections = 255
+unix_socket_directories = '/var/run/postgresql'
+ssl = on
+ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
+ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
+shared_buffers = 128MB
+dynamic_shared_memory_type = posix
+max_wal_size = 256MB
+min_wal_size = 80MB
+log_line_prefix = '%t '
+log_timezone = 'UTC'
+cluster_name = '13/main'
+stats_temp_directory = '/var/run/postgresql/13-main.pg_stat_tmp'
+datestyle = 'iso, mdy'
+timezone = 'UTC'
+lc_messages = 'en_US.UTF-8'
+lc_monetary = 'en_US.UTF-8'
+lc_numeric = 'en_US.UTF-8'
+lc_time = 'en_US.UTF-8'
+default_text_search_config = 'pg_catalog.english'
+include_dir = 'conf.d'
+fsync = false