repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
bachya/regenmaschine
regenmaschine/client.py
_raise_for_remote_status
def _raise_for_remote_status(url: str, data: dict) -> None: """Raise an error from the remote API if necessary.""" if data.get('errorType') and data['errorType'] > 0: raise_remote_error(data['errorType']) if data.get('statusCode') and data['statusCode'] != 200: raise RequestError( 'Error requesting data from {0}: {1} {2}'.format( url, data['statusCode'], data['message']))
python
def _raise_for_remote_status(url: str, data: dict) -> None: """Raise an error from the remote API if necessary.""" if data.get('errorType') and data['errorType'] > 0: raise_remote_error(data['errorType']) if data.get('statusCode') and data['statusCode'] != 200: raise RequestError( 'Error requesting data from {0}: {1} {2}'.format( url, data['statusCode'], data['message']))
[ "def", "_raise_for_remote_status", "(", "url", ":", "str", ",", "data", ":", "dict", ")", "->", "None", ":", "if", "data", ".", "get", "(", "'errorType'", ")", "and", "data", "[", "'errorType'", "]", ">", "0", ":", "raise_remote_error", "(", "data", "[...
Raise an error from the remote API if necessary.
[ "Raise", "an", "error", "from", "the", "remote", "API", "if", "necessary", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/client.py#L140-L148
bachya/regenmaschine
regenmaschine/client.py
login
async def login(
        host: str,
        password: str,
        websession: ClientSession,
        *,
        port: int = 8080,
        ssl: bool = True,
        request_timeout: int = DEFAULT_TIMEOUT) -> Controller:
    """Authenticate against a RainMachine device.

    Deprecated backward-compatibility shim: builds a Client, performs a
    local login, and returns the single controller that was registered.
    """
    print('regenmaschine.client.login() is deprecated; see documentation!')

    compat_client = Client(websession, request_timeout)
    await compat_client.load_local(host, password, port, ssl)

    registered = compat_client.controllers
    return next(iter(registered.values()))
[ "async", "def", "login", "(", "host", ":", "str", ",", "password", ":", "str", ",", "websession", ":", "ClientSession", ",", "*", ",", "port", ":", "int", "=", "8080", ",", "ssl", ":", "bool", "=", "True", ",", "request_timeout", ":", "int", "=", "...
Authenticate against a RainMachine device.
[ "Authenticate", "against", "a", "RainMachine", "device", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/client.py#L151-L163
bachya/regenmaschine
regenmaschine/client.py
Client.load_local
async def load_local(  # pylint: disable=too-many-arguments
        self,
        host: str,
        password: str,
        port: int = DEFAULT_LOCAL_PORT,
        ssl: bool = True,
        skip_existing: bool = True) -> None:
    """Create a local client.

    Logs in against a device on the local network, queries its
    identifying metadata, and registers it in ``self.controllers``
    keyed by MAC address.  When ``skip_existing`` is True, a device
    whose MAC address is already registered is left untouched.
    """
    local = LocalController(
        self._request, host, port, ssl, self._websession)
    await local.login(password)

    wifi_info = await local.provisioning.wifi()
    mac = wifi_info['macAddress']
    if skip_existing and mac in self.controllers:
        return

    versions = await local.api.versions()
    local.api_version = versions['apiVer']
    local.hardware_version = versions['hwVer']
    local.mac = mac
    local.name = await local.provisioning.device_name
    local.software_version = versions['swVer']

    self.controllers[mac] = local
[ "async", "def", "load_local", "(", "# pylint: disable=too-many-arguments", "self", ",", "host", ":", "str", ",", "password", ":", "str", ",", "port", ":", "int", "=", "DEFAULT_LOCAL_PORT", ",", "ssl", ":", "bool", "=", "True", ",", "skip_existing", ":", "boo...
Create a local client.
[ "Create", "a", "local", "client", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/client.py#L31-L54
bachya/regenmaschine
regenmaschine/client.py
Client.load_remote
async def load_remote(
        self, email: str, password: str,
        skip_existing: bool = True) -> None:
    """Create remote clients for all controllers under a cloud account.

    Authenticates against my.rainmachine.com, enumerates the account's
    sprinklers, and registers a RemoteController for each one in
    ``self.controllers`` keyed by MAC address.

    Args:
        email: The RainMachine cloud account email address.
        password: The account password.
        skip_existing: When True, skip sprinklers whose MAC address is
            already registered.
    """
    # Both cloud endpoints expect the same credential payload.
    credentials = {'user': {'email': email, 'pwd': password, 'remember': 1}}

    auth_resp = await self._request(
        'post', 'https://my.rainmachine.com/login/auth', json=credentials)
    access_token = auth_resp['access_token']

    sprinklers_resp = await self._request(
        'post',
        'https://my.rainmachine.com/devices/get-sprinklers',
        access_token=access_token,
        json=credentials)

    for sprinkler in sprinklers_resp['sprinklers']:
        mac = sprinkler['mac']
        if skip_existing and mac in self.controllers:
            continue

        controller = RemoteController(self._request, self._websession)
        await controller.login(
            access_token, sprinkler['sprinklerId'], password)

        version_data = await controller.api.versions()
        controller.api_version = version_data['apiVer']
        controller.hardware_version = version_data['hwVer']
        controller.mac = mac
        controller.name = sprinkler['name']
        controller.software_version = version_data['swVer']

        self.controllers[mac] = controller
[ "async", "def", "load_remote", "(", "self", ",", "email", ":", "str", ",", "password", ":", "str", ",", "skip_existing", ":", "bool", "=", "True", ")", "->", "None", ":", "auth_resp", "=", "await", "self", ".", "_request", "(", "'post'", ",", "'https:/...
Create a local client.
[ "Create", "a", "local", "client", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/client.py#L56-L96
bachya/regenmaschine
regenmaschine/client.py
Client._request
async def _request(
        self,
        method: str,
        url: str,
        *,
        access_token: str = None,
        access_token_expiration: datetime = None,
        headers: dict = None,
        params: dict = None,
        json: dict = None,
        ssl: bool = True) -> dict:
    """Make a request against the RainMachine device.

    Args:
        method: The HTTP method ('get', 'post', ...).
        url: The full URL to request.
        access_token: Optional token appended as an ``access_token``
            query parameter.
        access_token_expiration: Optional expiry of the token; checked
            against ``datetime.now()`` before the request is made.
        headers: Optional extra request headers (never mutated).
        params: Optional query parameters (never mutated).
        json: Optional JSON request body.
        ssl: Whether to verify SSL on the connection.

    Returns:
        The decoded JSON response body.

    Raises:
        TokenExpiredError: If the supplied token has expired.
        RequestError: On HTTP/client errors, remote API errors, or a
            request timeout.
    """
    if (access_token_expiration
            and datetime.now() >= access_token_expiration):
        raise TokenExpiredError('Long-lived access token has expired')

    # Copy caller-supplied dicts so the caller's objects are never
    # mutated as a side effect of this request.
    headers = dict(headers) if headers else {}
    headers['Content-Type'] = 'application/json'

    params = dict(params) if params else {}
    if access_token:
        params['access_token'] = access_token

    try:
        async with async_timeout.timeout(self.request_timeout):
            async with self._websession.request(
                    method, url, headers=headers, params=params, json=json,
                    ssl=ssl) as resp:
                resp.raise_for_status()
                data = await resp.json(content_type=None)
                _raise_for_remote_status(url, data)
    except ClientError as err:
        # Chain the underlying aiohttp error for easier debugging.
        raise RequestError(
            'Error requesting data from {0}: {1}'.format(url, err)) from err
    except asyncio.TimeoutError:
        raise RequestError('Timeout during request: {0}'.format(url))

    return data
[ "async", "def", "_request", "(", "self", ",", "method", ":", "str", ",", "url", ":", "str", ",", "*", ",", "access_token", ":", "str", "=", "None", ",", "access_token_expiration", ":", "datetime", "=", "None", ",", "headers", ":", "dict", "=", "None", ...
Make a request against the RainMachine device.
[ "Make", "a", "request", "against", "the", "RainMachine", "device", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/client.py#L98-L137
bachya/regenmaschine
regenmaschine/errors.py
raise_remote_error
def raise_remote_error(error_code: int) -> None:
    """Raise the appropriate error with a remote error code.

    Args:
        error_code: The integer error code returned by the remote API.

    Raises:
        RequestError: Always -- either with the mapped error message or
            with a generic "unknown code" message.
    """
    # ERROR_CODES is a dict, so use a direct lookup instead of scanning
    # items() with a generator.  Keeping the raise outside the try block
    # also makes it impossible to confuse the mapped RequestError with a
    # lookup failure.
    try:
        message = ERROR_CODES[error_code]
    except KeyError:
        raise RequestError(
            'Unknown remote error code returned: {0}'.format(error_code))
    raise RequestError(message)
[ "def", "raise_remote_error", "(", "error_code", ":", "int", ")", "->", "None", ":", "try", ":", "error", "=", "next", "(", "(", "v", "for", "k", ",", "v", "in", "ERROR_CODES", ".", "items", "(", ")", "if", "k", "==", "error_code", ")", ")", "raise"...
Raise the appropriate error with a remote error code.
[ "Raise", "the", "appropriate", "error", "with", "a", "remote", "error", "code", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/errors.py#L27-L34
workforce-data-initiative/skills-utils
skills_utils/metta.py
quarter_boundaries
def quarter_boundaries(quarter):
    """Returns first and last day of a quarter

    Args:
        quarter (str) quarter, in format '2015Q1'

    Returns:
        (tuple) datetime.dates for the first and last days of the quarter
    """
    year_text, quarter_text = quarter.split('Q')
    year = int(year_text)
    quarter_number = int(quarter_text)

    # Quarter q covers months 3q-2 .. 3q (e.g. Q2 -> April..June).
    start_month = 3 * quarter_number - 2
    end_month = 3 * quarter_number

    _, days_in_end_month = monthrange(year, end_month)
    return (
        date(year, start_month, 1),
        date(year, end_month, days_in_end_month),
    )
[ "def", "quarter_boundaries", "(", "quarter", ")", ":", "year", ",", "quarter", "=", "quarter", ".", "split", "(", "'Q'", ")", "year", "=", "int", "(", "year", ")", "quarter", "=", "int", "(", "quarter", ")", "first_month_of_quarter", "=", "3", "*", "qu...
Returns first and last day of a quarter Args: quarter (str) quarter, in format '2015Q1' Returns: (tuple) datetime.dates for the first and last days of the quarter
[ "Returns", "first", "and", "last", "day", "of", "a", "quarter" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/metta.py#L9-L24
workforce-data-initiative/skills-utils
skills_utils/metta.py
metta_config
def metta_config(quarter, num_dimensions):
    """Returns metta metadata for a quarter's SOC code classifier matrix

    Args:
        quarter (str) quarter, in format '2015Q1'
        num_dimensions (int) Number of features in matrix

    Returns:
        (dict) metadata suitable for metta.archive_train_test
    """
    start, end = quarter_boundaries(quarter)
    feature_names = [
        'doc2vec_{}'.format(dim) for dim in range(num_dimensions)
    ]
    return {
        'start_time': start,
        'end_time': end,
        'prediction_window': 3,  # ???
        'label_name': 'onet_soc_code',
        'label_type': 'categorical',
        'matrix_id': 'job_postings_{}'.format(quarter),
        'feature_names': feature_names,
    }
[ "def", "metta_config", "(", "quarter", ",", "num_dimensions", ")", ":", "first_day", ",", "last_day", "=", "quarter_boundaries", "(", "quarter", ")", "return", "{", "'start_time'", ":", "first_day", ",", "'end_time'", ":", "last_day", ",", "'prediction_window'", ...
Returns metta metadata for a quarter's SOC code classifier matrix Args: quarter (str) quarter, in format '2015Q1' num_dimensions (int) Number of features in matrix Returns: (dict) metadata suitable for metta.archive_train_test
[ "Returns", "metta", "metadata", "for", "a", "quarter", "s", "SOC", "code", "classifier", "matrix" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/metta.py#L27-L45
workforce-data-initiative/skills-utils
skills_utils/metta.py
upload_to_metta
def upload_to_metta(train_features_path, train_labels_path,
                    test_features_path, test_labels_path,
                    train_quarter, test_quarter, num_dimensions):
    """Store train and test matrices using metta

    Args:
        train_features_path (str) Path to matrix with train features
        train_labels_path (str) Path to matrix with train labels
        test_features_path (str) Path to matrix with test features
        test_labels_path (str) Path to matrix with test labels
        train_quarter (str) Quarter of train matrix
        test_quarter (str) Quarter of test matrix
        num_dimensions (int) Number of features
    """
    train_config = metta_config(train_quarter, num_dimensions)
    test_config = metta_config(test_quarter, num_dimensions)

    # Features: one doc2vec_<i> column per embedding dimension.
    X_train = pd.read_csv(train_features_path, sep=',')
    X_train.columns = ['doc2vec_'+str(i) for i in range(X_train.shape[1])]
    # Labels: a single onet_soc_code column.
    Y_train = pd.read_csv(train_labels_path)
    Y_train.columns = ['onet_soc_code']
    train = pd.concat([X_train, Y_train], axis=1)

    X_test = pd.read_csv(test_features_path, sep=',')
    X_test.columns = ['doc2vec_'+str(i) for i in range(X_test.shape[1])]
    Y_test = pd.read_csv(test_labels_path)
    Y_test.columns = ['onet_soc_code']
    test = pd.concat([X_test, Y_test], axis=1)

    # NOTE(review): `train`/`test` (features + labels) are assembled above
    # but only the feature frames X_train/X_test are archived below, so
    # the labels never reach metta.  This looks like a bug -- confirm
    # whether archive_train_test should receive `train` and `test`.
    metta.archive_train_test(
        train_config,
        X_train,
        test_config,
        X_test,
        directory='wdi'
    )
[ "def", "upload_to_metta", "(", "train_features_path", ",", "train_labels_path", ",", "test_features_path", ",", "test_labels_path", ",", "train_quarter", ",", "test_quarter", ",", "num_dimensions", ")", ":", "train_config", "=", "metta_config", "(", "train_quarter", ","...
Store train and test matrices using metta Args: train_features_path (str) Path to matrix with train features train_labels_path (str) Path to matrix with train labels test_features_path (str) Path to matrix with test features test_labels_path (str) Path to matrix with test labels train_quarter (str) Quarter of train matrix test_quarter (str) Quarter of test matrix num_dimensions (int) Number of features
[ "Store", "train", "and", "test", "matrices", "using", "metta" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/metta.py#L47-L85
devopshq/crosspm
crosspm/helpers/usedby.py
Usedby.usedby_packages
def usedby_packages(self, deps_file_path=None, depslock_file_path=None,
                    packages=None):
    """Lock packages.  Downloader search packages.

    When ``packages`` is None the dependencies file is searched;
    otherwise the given packages are assigned to the root package.

    NOTE(review): ``depslock_file_path`` is normalized below but never
    read afterwards in this method -- confirm before removing.
    """
    deps_file_path = (
        self._deps_path if deps_file_path is None else deps_file_path)
    depslock_file_path = (
        self._depslock_path if depslock_file_path is None
        else depslock_file_path)

    # Keep the lock file distinct from the deps file itself.
    if deps_file_path == depslock_file_path:
        depslock_file_path += '.lock'

    if packages is None:
        self.search_dependencies(deps_file_path)
    else:
        self._root_package.packages = packages

    self._log.info('Done!')
[ "def", "usedby_packages", "(", "self", ",", "deps_file_path", "=", "None", ",", "depslock_file_path", "=", "None", ",", "packages", "=", "None", ")", ":", "if", "deps_file_path", "is", "None", ":", "deps_file_path", "=", "self", ".", "_deps_path", "if", "dep...
Lock packages. Downloader search packages
[ "Lock", "packages", ".", "Downloader", "search", "packages" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/usedby.py#L11-L26
workforce-data-initiative/skills-utils
skills_utils/s3.py
upload
def upload(s3_conn, filepath, s3_path):
    """Uploads the given file to s3

    Args:
        s3_conn: (boto.s3.connection) an s3 connection
        filepath (str) the local filename
        s3_path (str) the destination path on s3
    """
    bucket_name, prefix = split_s3_path(s3_path)
    target_bucket = s3_conn.get_bucket(bucket_name)

    # Preserve only the basename of the local path under the S3 prefix.
    basename = os.path.basename(filepath)
    destination_key = boto.s3.key.Key(
        bucket=target_bucket,
        name='{}/{}'.format(prefix, basename)
    )
    logging.info('uploading from %s to %s', filepath, destination_key)
    destination_key.set_contents_from_filename(filepath)
[ "def", "upload", "(", "s3_conn", ",", "filepath", ",", "s3_path", ")", ":", "bucket_name", ",", "prefix", "=", "split_s3_path", "(", "s3_path", ")", "bucket", "=", "s3_conn", ".", "get_bucket", "(", "bucket_name", ")", "filename", "=", "os", ".", "path", ...
Uploads the given file to s3 Args: s3_conn: (boto.s3.connection) an s3 connection filepath (str) the local filename s3_path (str) the destination path on s3
[ "Uploads", "the", "given", "file", "to", "s3" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/s3.py#L24-L41
workforce-data-initiative/skills-utils
skills_utils/s3.py
upload_dict
def upload_dict(s3_conn, s3_prefix, data_to_sync):
    """Syncs a dictionary to an S3 bucket, serializing each value in the
    dictionary as a JSON file with the key as its name.

    Args:
        s3_conn: (boto.s3.connection) an s3 connection
        s3_prefix: (str) the destination prefix
        data_to_sync: (dict)
    """
    bucket_name, prefix = split_s3_path(s3_prefix)
    target_bucket = s3_conn.get_bucket(bucket_name)

    for name, payload in data_to_sync.items():
        object_name = '{}/{}.json'.format(prefix, name)
        s3_key = boto.s3.key.Key(
            bucket=target_bucket,
            name=object_name
        )
        logging.info('uploading key %s', object_name)
        s3_key.set_contents_from_string(json.dumps(payload))
[ "def", "upload_dict", "(", "s3_conn", ",", "s3_prefix", ",", "data_to_sync", ")", ":", "bucket_name", ",", "prefix", "=", "split_s3_path", "(", "s3_prefix", ")", "bucket", "=", "s3_conn", ".", "get_bucket", "(", "bucket_name", ")", "for", "key", ",", "value"...
Syncs a dictionary to an S3 bucket, serializing each value in the dictionary as a JSON file with the key as its name. Args: s3_conn: (boto.s3.connection) an s3 connection s3_prefix: (str) the destination prefix data_to_sync: (dict)
[ "Syncs", "a", "dictionary", "to", "an", "S3", "bucket", "serializing", "each", "value", "in", "the", "dictionary", "as", "a", "JSON", "file", "with", "the", "key", "as", "its", "name", "." ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/s3.py#L44-L63
workforce-data-initiative/skills-utils
skills_utils/s3.py
download
def download(s3_conn, out_filename, s3_path):
    """Downloads the given s3_path

    Args:
        s3_conn (boto.s3.connection) a boto s3 connection
        out_filename (str) local filename to save the file
        s3_path (str) the source path on s3
    """
    bucket_name, prefix = split_s3_path(s3_path)
    source_bucket = s3_conn.get_bucket(bucket_name)
    source_key = boto.s3.key.Key(
        bucket=source_bucket,
        name=prefix
    )
    logging.info('loading from %s into %s', source_key, out_filename)
    source_key.get_contents_to_filename(
        out_filename, cb=log_download_progress)
[ "def", "download", "(", "s3_conn", ",", "out_filename", ",", "s3_path", ")", ":", "bucket_name", ",", "prefix", "=", "split_s3_path", "(", "s3_path", ")", "bucket", "=", "s3_conn", ".", "get_bucket", "(", "bucket_name", ")", "key", "=", "boto", ".", "s3", ...
Downloads the given s3_path Args: s3_conn (boto.s3.connection) a boto s3 connection out_filename (str) local filename to save the file s3_path (str) the source path on s3
[ "Downloads", "the", "given", "s3_path" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/s3.py#L66-L81
bachya/regenmaschine
regenmaschine/controller.py
Controller._request
async def _request( self, method: str, endpoint: str, *, headers: dict = None, params: dict = None, json: dict = None, ssl: bool = True) -> dict: """Wrap the generic request method to add access token, etc.""" return await self._client_request( method, '{0}/{1}'.format(self._host, endpoint), access_token=self._access_token, access_token_expiration=self._access_token_expiration, headers=headers, params=params, json=json, ssl=ssl)
python
async def _request( self, method: str, endpoint: str, *, headers: dict = None, params: dict = None, json: dict = None, ssl: bool = True) -> dict: """Wrap the generic request method to add access token, etc.""" return await self._client_request( method, '{0}/{1}'.format(self._host, endpoint), access_token=self._access_token, access_token_expiration=self._access_token_expiration, headers=headers, params=params, json=json, ssl=ssl)
[ "async", "def", "_request", "(", "self", ",", "method", ":", "str", ",", "endpoint", ":", "str", ",", "*", ",", "headers", ":", "dict", "=", "None", ",", "params", ":", "dict", "=", "None", ",", "json", ":", "dict", "=", "None", ",", "ssl", ":", ...
Wrap the generic request method to add access token, etc.
[ "Wrap", "the", "generic", "request", "method", "to", "add", "access", "token", "etc", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/controller.py#L52-L70
bachya/regenmaschine
regenmaschine/controller.py
LocalController.login
async def login(self, password):
    """Authenticate against the device (locally)."""
    credentials = {'pwd': password, 'remember': 1}
    auth_resp = await self._client_request(
        'post', '{0}/auth/login'.format(self._host), json=credentials)

    self._access_token = auth_resp['access_token']
    # Expire the token 10 seconds early to avoid edge-of-window failures.
    lifetime_seconds = int(auth_resp['expires_in']) - 10
    self._access_token_expiration = (
        datetime.now() + timedelta(seconds=lifetime_seconds))
[ "async", "def", "login", "(", "self", ",", "password", ")", ":", "auth_resp", "=", "await", "self", ".", "_client_request", "(", "'post'", ",", "'{0}/auth/login'", ".", "format", "(", "self", ".", "_host", ")", ",", "json", "=", "{", "'pwd'", ":", "pas...
Authenticate against the device (locally).
[ "Authenticate", "against", "the", "device", "(", "locally", ")", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/controller.py#L85-L97
bachya/regenmaschine
regenmaschine/controller.py
RemoteController.login
async def login(
        self, stage_1_access_token: str, sprinkler_id: str,
        password: str) -> None:
    """Authenticate against the device (remotely)."""
    payload = {
        'sprinklerId': sprinkler_id,
        'pwd': password,
    }
    auth_resp = await self._client_request(
        'post',
        'https://my.rainmachine.com/devices/login-sprinkler',
        access_token=stage_1_access_token,
        json=payload)

    # All subsequent calls are routed through the cloud proxy URL for
    # this sprinkler.
    self._host = URL_BASE_REMOTE.format(sprinkler_id)
    self._access_token = auth_resp['access_token']
[ "async", "def", "login", "(", "self", ",", "stage_1_access_token", ":", "str", ",", "sprinkler_id", ":", "str", ",", "password", ":", "str", ")", "->", "None", ":", "auth_resp", "=", "await", "self", ".", "_client_request", "(", "'post'", ",", "'https://my...
Authenticate against the device (remotely).
[ "Authenticate", "against", "the", "device", "(", "remotely", ")", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/controller.py#L103-L117
devopshq/crosspm
crosspm/helpers/downloader.py
Downloader.set_duplicated_flag
def set_duplicated_flag(self):
    """
    For all package set flag duplicated, if it's not unique package
    :return:
    """
    # Packages already seen, grouped by package name; each new package is
    # compared against every earlier package that shares its name.
    package_by_name = defaultdict(list)
    for package1 in self._root_package.all_packages:
        if package1 is None:
            continue
        pkg_name = package1.package_name
        # Parameters that together must be unique per package name.
        # NOTE(review): fetched inside the loop although it looks
        # loop-invariant -- confirm get_fails() has no per-package side
        # effects before hoisting it out.
        param_list = self._config.get_fails('unique', {})
        params1 = package1.get_params(param_list)
        for package2 in package_by_name[pkg_name]:
            params2 = package2.get_params(param_list)
            for x in param_list:
                # START HACK for cached archive
                # Normalize list-valued params to lists of strings so the
                # string comparison below is stable across element types.
                param1 = params1[x]
                param2 = params2[x]
                if isinstance(param1, list):
                    param1 = [str(x) for x in param1]
                if isinstance(param2, list):
                    param2 = [str(x) for x in param2]
                # END
                # Same name but differing unique-params: mark both sides
                # as duplicated.
                if str(param1) != str(param2):
                    package1.duplicated = True
                    package2.duplicated = True
        package_by_name[pkg_name].append(package1)
[ "def", "set_duplicated_flag", "(", "self", ")", ":", "package_by_name", "=", "defaultdict", "(", "list", ")", "for", "package1", "in", "self", ".", "_root_package", ".", "all_packages", ":", "if", "package1", "is", "None", ":", "continue", "pkg_name", "=", "...
For all package set flag duplicated, if it's not unique package :return:
[ "For", "all", "package", "set", "flag", "duplicated", "if", "it", "s", "not", "unique", "package", ":", "return", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/downloader.py#L189-L217
devopshq/crosspm
crosspm/helpers/python.py
get_object_from_string
def get_object_from_string(object_path): """ Return python object from string :param object_path: e.g os.path.join :return: python object """ # split like crosspm.template.GUS => crosspm.template, GUS try: module_name, object_name = object_path.rsplit('.', maxsplit=1) module_ = __import__(module_name, globals(), locals(), ['App'], 0) variable_ = getattr(module_, object_name) except Exception: variable_ = None return variable_
python
def get_object_from_string(object_path): """ Return python object from string :param object_path: e.g os.path.join :return: python object """ # split like crosspm.template.GUS => crosspm.template, GUS try: module_name, object_name = object_path.rsplit('.', maxsplit=1) module_ = __import__(module_name, globals(), locals(), ['App'], 0) variable_ = getattr(module_, object_name) except Exception: variable_ = None return variable_
[ "def", "get_object_from_string", "(", "object_path", ")", ":", "# split like crosspm.template.GUS => crosspm.template, GUS", "try", ":", "module_name", ",", "object_name", "=", "object_path", ".", "rsplit", "(", "'.'", ",", "maxsplit", "=", "1", ")", "module_", "=", ...
Return python object from string :param object_path: e.g os.path.join :return: python object
[ "Return", "python", "object", "from", "string", ":", "param", "object_path", ":", "e", ".", "g", "os", ".", "path", ".", "join", ":", "return", ":", "python", "object" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/python.py#L1-L14
bachya/regenmaschine
regenmaschine/zone.py
Zone._post
async def _post(self, zone_id: int = None, json: dict = None) -> dict: """Post data to a (non)existing zone.""" return await self._request( 'post', 'zone/{0}/properties'.format(zone_id), json=json)
python
async def _post(self, zone_id: int = None, json: dict = None) -> dict: """Post data to a (non)existing zone.""" return await self._request( 'post', 'zone/{0}/properties'.format(zone_id), json=json)
[ "async", "def", "_post", "(", "self", ",", "zone_id", ":", "int", "=", "None", ",", "json", ":", "dict", "=", "None", ")", "->", "dict", ":", "return", "await", "self", ".", "_request", "(", "'post'", ",", "'zone/{0}/properties'", ".", "format", "(", ...
Post data to a (non)existing zone.
[ "Post", "data", "to", "a", "(", "non", ")", "existing", "zone", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/zone.py#L12-L15
bachya/regenmaschine
regenmaschine/zone.py
Zone.all
async def all( self, *, details: bool = False, include_inactive: bool = False) -> list: """Return all zones (with optional advanced properties).""" endpoint = 'zone' if details: endpoint += '/properties' data = await self._request('get', endpoint) return [z for z in data['zones'] if include_inactive or z['active']]
python
async def all( self, *, details: bool = False, include_inactive: bool = False) -> list: """Return all zones (with optional advanced properties).""" endpoint = 'zone' if details: endpoint += '/properties' data = await self._request('get', endpoint) return [z for z in data['zones'] if include_inactive or z['active']]
[ "async", "def", "all", "(", "self", ",", "*", ",", "details", ":", "bool", "=", "False", ",", "include_inactive", ":", "bool", "=", "False", ")", "->", "list", ":", "endpoint", "=", "'zone'", "if", "details", ":", "endpoint", "+=", "'/properties'", "da...
Return all zones (with optional advanced properties).
[ "Return", "all", "zones", "(", "with", "optional", "advanced", "properties", ")", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/zone.py#L17-L25
bachya/regenmaschine
regenmaschine/zone.py
Zone.get
async def get(self, zone_id: int, *, details: bool = False) -> dict: """Return a specific zone.""" endpoint = 'zone/{0}'.format(zone_id) if details: endpoint += '/properties' return await self._request('get', endpoint)
python
async def get(self, zone_id: int, *, details: bool = False) -> dict: """Return a specific zone.""" endpoint = 'zone/{0}'.format(zone_id) if details: endpoint += '/properties' return await self._request('get', endpoint)
[ "async", "def", "get", "(", "self", ",", "zone_id", ":", "int", ",", "*", ",", "details", ":", "bool", "=", "False", ")", "->", "dict", ":", "endpoint", "=", "'zone/{0}'", ".", "format", "(", "zone_id", ")", "if", "details", ":", "endpoint", "+=", ...
Return a specific zone.
[ "Return", "a", "specific", "zone", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/zone.py#L35-L40
bachya/regenmaschine
regenmaschine/zone.py
Zone.start
async def start(self, zone_id: int, time: int) -> dict: """Start a program.""" return await self._request( 'post', 'zone/{0}/start'.format(zone_id), json={'time': time})
python
async def start(self, zone_id: int, time: int) -> dict: """Start a program.""" return await self._request( 'post', 'zone/{0}/start'.format(zone_id), json={'time': time})
[ "async", "def", "start", "(", "self", ",", "zone_id", ":", "int", ",", "time", ":", "int", ")", "->", "dict", ":", "return", "await", "self", ".", "_request", "(", "'post'", ",", "'zone/{0}/start'", ".", "format", "(", "zone_id", ")", ",", "json", "=...
Start a program.
[ "Start", "a", "program", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/zone.py#L42-L45
DanielSank/observed
observed.py
observable_method
def observable_method(func, strategy='instances'): """ I turn a method into something that can be observed by other callables. You can use me as a decorator on a method, like this: class Foo(object): __init__(self, name): self.name = name @observable_method def bar(self, x): print("%s called bar with arg: %s"%(self.name, x)) Now other functions and methods can sign up to get notified when my_func is called: def observer(x): print("observer called with arg: %s"%(x,)) a = Foo('a') b = Foo('b') a.bar.add_observer(observer) a.bar.add_observer(b.bar) a.bar('banana') >>> a called bar with arg: banana >>> b called bar with arg: banana >>> observer called with arg: banana Note that bar can be an observer as well as observed. Unregister observers like this: a.bar.discard_observer(observer) Args: func: The function (i.e. unbound method) to be made observable. strategy: This argument requires some background explanation. When observers are registered to a bound method, we need to store those observers so that we can call them when the observed method is called. There are two ways to do this as explained below. In any case, access to the observable method is managed by a descriptor, and we select which strategy we use for storing observers by using one descriptor or another. The strategy argument selects the descriptor used. The first strategy is to give each instance of the class containing the decorated method an attribute whose value is a collection of observers for each of its observable methods. This is the default strategy and is implemented in ObservableMethodManager_PersistOnInstances. The advantages of this strategy are that the code is very simple and pickling the observers along with the instance owning the observable methods is easier. The other strategy is to persist the observers for each instance inside the descriptor which manages access to that method. This strategy is implemented in ObservableMethodManager_PersistOnDescriptor. The advantage(?) 
of this strategy is that the observer framework doesn't paste any data onto the instances which have observable methods. It's not entirely clear that this is actually useful but we include it as an option. For the simpler strategy in which we store the observers in the instances, just use me as a decorator. If you want the alternate strategy in which the observers are stored in the descriptor, call me explicitly on the function (unbound method) you want to make observable and set strategy='descriptor'. """ if strategy == 'instances': return ObservableMethodManager_PersistOnInstances(func) elif strategy == 'descriptor': return ObservableMethodManager_PersistOnDescriptor(func) else: raise ValueError("Strategy %s not recognized"%(strategy,))
python
def observable_method(func, strategy='instances'): """ I turn a method into something that can be observed by other callables. You can use me as a decorator on a method, like this: class Foo(object): __init__(self, name): self.name = name @observable_method def bar(self, x): print("%s called bar with arg: %s"%(self.name, x)) Now other functions and methods can sign up to get notified when my_func is called: def observer(x): print("observer called with arg: %s"%(x,)) a = Foo('a') b = Foo('b') a.bar.add_observer(observer) a.bar.add_observer(b.bar) a.bar('banana') >>> a called bar with arg: banana >>> b called bar with arg: banana >>> observer called with arg: banana Note that bar can be an observer as well as observed. Unregister observers like this: a.bar.discard_observer(observer) Args: func: The function (i.e. unbound method) to be made observable. strategy: This argument requires some background explanation. When observers are registered to a bound method, we need to store those observers so that we can call them when the observed method is called. There are two ways to do this as explained below. In any case, access to the observable method is managed by a descriptor, and we select which strategy we use for storing observers by using one descriptor or another. The strategy argument selects the descriptor used. The first strategy is to give each instance of the class containing the decorated method an attribute whose value is a collection of observers for each of its observable methods. This is the default strategy and is implemented in ObservableMethodManager_PersistOnInstances. The advantages of this strategy are that the code is very simple and pickling the observers along with the instance owning the observable methods is easier. The other strategy is to persist the observers for each instance inside the descriptor which manages access to that method. This strategy is implemented in ObservableMethodManager_PersistOnDescriptor. The advantage(?) 
of this strategy is that the observer framework doesn't paste any data onto the instances which have observable methods. It's not entirely clear that this is actually useful but we include it as an option. For the simpler strategy in which we store the observers in the instances, just use me as a decorator. If you want the alternate strategy in which the observers are stored in the descriptor, call me explicitly on the function (unbound method) you want to make observable and set strategy='descriptor'. """ if strategy == 'instances': return ObservableMethodManager_PersistOnInstances(func) elif strategy == 'descriptor': return ObservableMethodManager_PersistOnDescriptor(func) else: raise ValueError("Strategy %s not recognized"%(strategy,))
[ "def", "observable_method", "(", "func", ",", "strategy", "=", "'instances'", ")", ":", "if", "strategy", "==", "'instances'", ":", "return", "ObservableMethodManager_PersistOnInstances", "(", "func", ")", "elif", "strategy", "==", "'descriptor'", ":", "return", "...
I turn a method into something that can be observed by other callables. You can use me as a decorator on a method, like this: class Foo(object): __init__(self, name): self.name = name @observable_method def bar(self, x): print("%s called bar with arg: %s"%(self.name, x)) Now other functions and methods can sign up to get notified when my_func is called: def observer(x): print("observer called with arg: %s"%(x,)) a = Foo('a') b = Foo('b') a.bar.add_observer(observer) a.bar.add_observer(b.bar) a.bar('banana') >>> a called bar with arg: banana >>> b called bar with arg: banana >>> observer called with arg: banana Note that bar can be an observer as well as observed. Unregister observers like this: a.bar.discard_observer(observer) Args: func: The function (i.e. unbound method) to be made observable. strategy: This argument requires some background explanation. When observers are registered to a bound method, we need to store those observers so that we can call them when the observed method is called. There are two ways to do this as explained below. In any case, access to the observable method is managed by a descriptor, and we select which strategy we use for storing observers by using one descriptor or another. The strategy argument selects the descriptor used. The first strategy is to give each instance of the class containing the decorated method an attribute whose value is a collection of observers for each of its observable methods. This is the default strategy and is implemented in ObservableMethodManager_PersistOnInstances. The advantages of this strategy are that the code is very simple and pickling the observers along with the instance owning the observable methods is easier. The other strategy is to persist the observers for each instance inside the descriptor which manages access to that method. This strategy is implemented in ObservableMethodManager_PersistOnDescriptor. The advantage(?) 
of this strategy is that the observer framework doesn't paste any data onto the instances which have observable methods. It's not entirely clear that this is actually useful but we include it as an option. For the simpler strategy in which we store the observers in the instances, just use me as a decorator. If you want the alternate strategy in which the observers are stored in the descriptor, call me explicitly on the function (unbound method) you want to make observable and set strategy='descriptor'.
[ "I", "turn", "a", "method", "into", "something", "that", "can", "be", "observed", "by", "other", "callables", ".", "You", "can", "use", "me", "as", "a", "decorator", "on", "a", "method", "like", "this", ":", "class", "Foo", "(", "object", ")", ":", "...
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L637-L712
DanielSank/observed
observed.py
ObservableFunction.add_observer
def add_observer(self, observer, identify_observed=False): """Register an observer to observe me. Args: observer: The callable to register as an observer. identify_observed: If True, then the observer will get myself passed as an additional first argument whenever it is invoked. See ObserverFunction and ObserverBoundMethod to see how this works. Returns: True if the observer was added, False otherwise. The observing function or method will be called whenever I am called, and with the same arguments and keyword arguments. If a bound method or function has already been registered as an observer, trying to add it again does nothing. In other words, there is no way to sign up an observer to be called back multiple times. This was a conscious design choice which users are invited to complain about if there is a compelling use case where this is inconvenient. """ # If the observer is a bound method, if hasattr(observer, "__self__"): result = self._add_bound_method(observer, identify_observed) # Otherwise, assume observer is a normal function. else: result = self._add_function(observer, identify_observed) return result
python
def add_observer(self, observer, identify_observed=False): """Register an observer to observe me. Args: observer: The callable to register as an observer. identify_observed: If True, then the observer will get myself passed as an additional first argument whenever it is invoked. See ObserverFunction and ObserverBoundMethod to see how this works. Returns: True if the observer was added, False otherwise. The observing function or method will be called whenever I am called, and with the same arguments and keyword arguments. If a bound method or function has already been registered as an observer, trying to add it again does nothing. In other words, there is no way to sign up an observer to be called back multiple times. This was a conscious design choice which users are invited to complain about if there is a compelling use case where this is inconvenient. """ # If the observer is a bound method, if hasattr(observer, "__self__"): result = self._add_bound_method(observer, identify_observed) # Otherwise, assume observer is a normal function. else: result = self._add_function(observer, identify_observed) return result
[ "def", "add_observer", "(", "self", ",", "observer", ",", "identify_observed", "=", "False", ")", ":", "# If the observer is a bound method,", "if", "hasattr", "(", "observer", ",", "\"__self__\"", ")", ":", "result", "=", "self", ".", "_add_bound_method", "(", ...
Register an observer to observe me. Args: observer: The callable to register as an observer. identify_observed: If True, then the observer will get myself passed as an additional first argument whenever it is invoked. See ObserverFunction and ObserverBoundMethod to see how this works. Returns: True if the observer was added, False otherwise. The observing function or method will be called whenever I am called, and with the same arguments and keyword arguments. If a bound method or function has already been registered as an observer, trying to add it again does nothing. In other words, there is no way to sign up an observer to be called back multiple times. This was a conscious design choice which users are invited to complain about if there is a compelling use case where this is inconvenient.
[ "Register", "an", "observer", "to", "observe", "me", "." ]
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L216-L245
DanielSank/observed
observed.py
ObservableFunction._add_function
def _add_function(self, func, identify_observed): """Add a function as an observer. Args: func: The function to register as an observer. identify_observed: See docstring for add_observer. Returns: True if the function is added, otherwise False. """ key = self.make_key(func) if key not in self.observers: self.observers[key] = ObserverFunction( func, identify_observed, (key, self.observers)) return True else: return False
python
def _add_function(self, func, identify_observed): """Add a function as an observer. Args: func: The function to register as an observer. identify_observed: See docstring for add_observer. Returns: True if the function is added, otherwise False. """ key = self.make_key(func) if key not in self.observers: self.observers[key] = ObserverFunction( func, identify_observed, (key, self.observers)) return True else: return False
[ "def", "_add_function", "(", "self", ",", "func", ",", "identify_observed", ")", ":", "key", "=", "self", ".", "make_key", "(", "func", ")", "if", "key", "not", "in", "self", ".", "observers", ":", "self", ".", "observers", "[", "key", "]", "=", "Obs...
Add a function as an observer. Args: func: The function to register as an observer. identify_observed: See docstring for add_observer. Returns: True if the function is added, otherwise False.
[ "Add", "a", "function", "as", "an", "observer", "." ]
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L247-L264
DanielSank/observed
observed.py
ObservableFunction._add_bound_method
def _add_bound_method(self, bound_method, identify_observed): """Add an bound method as an observer. Args: bound_method: The bound method to add as an observer. identify_observed: See the docstring for add_observer. Returns: True if the bound method is added, otherwise False. """ inst = bound_method.__self__ method_name = bound_method.__name__ key = self.make_key(bound_method) if key not in self.observers: self.observers[key] = ObserverBoundMethod( inst, method_name, identify_observed, (key, self.observers)) return True else: return False
python
def _add_bound_method(self, bound_method, identify_observed): """Add an bound method as an observer. Args: bound_method: The bound method to add as an observer. identify_observed: See the docstring for add_observer. Returns: True if the bound method is added, otherwise False. """ inst = bound_method.__self__ method_name = bound_method.__name__ key = self.make_key(bound_method) if key not in self.observers: self.observers[key] = ObserverBoundMethod( inst, method_name, identify_observed, (key, self.observers)) return True else: return False
[ "def", "_add_bound_method", "(", "self", ",", "bound_method", ",", "identify_observed", ")", ":", "inst", "=", "bound_method", ".", "__self__", "method_name", "=", "bound_method", ".", "__name__", "key", "=", "self", ".", "make_key", "(", "bound_method", ")", ...
Add an bound method as an observer. Args: bound_method: The bound method to add as an observer. identify_observed: See the docstring for add_observer. Returns: True if the bound method is added, otherwise False.
[ "Add", "an", "bound", "method", "as", "an", "observer", "." ]
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L266-L285
DanielSank/observed
observed.py
ObservableFunction.discard_observer
def discard_observer(self, observer): """Un-register an observer. Args: observer: The observer to un-register. Returns true if an observer was removed, otherwise False. """ discarded = False key = self.make_key(observer) if key in self.observers: del self.observers[key] discarded = True return discarded
python
def discard_observer(self, observer): """Un-register an observer. Args: observer: The observer to un-register. Returns true if an observer was removed, otherwise False. """ discarded = False key = self.make_key(observer) if key in self.observers: del self.observers[key] discarded = True return discarded
[ "def", "discard_observer", "(", "self", ",", "observer", ")", ":", "discarded", "=", "False", "key", "=", "self", ".", "make_key", "(", "observer", ")", "if", "key", "in", "self", ".", "observers", ":", "del", "self", ".", "observers", "[", "key", "]",...
Un-register an observer. Args: observer: The observer to un-register. Returns true if an observer was removed, otherwise False.
[ "Un", "-", "register", "an", "observer", "." ]
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L287-L300
DanielSank/observed
observed.py
ObservableFunction.make_key
def make_key(observer): """Construct a unique, hashable, immutable key for an observer.""" if hasattr(observer, "__self__"): inst = observer.__self__ method_name = observer.__name__ key = (id(inst), method_name) else: key = id(observer) return key
python
def make_key(observer): """Construct a unique, hashable, immutable key for an observer.""" if hasattr(observer, "__self__"): inst = observer.__self__ method_name = observer.__name__ key = (id(inst), method_name) else: key = id(observer) return key
[ "def", "make_key", "(", "observer", ")", ":", "if", "hasattr", "(", "observer", ",", "\"__self__\"", ")", ":", "inst", "=", "observer", ".", "__self__", "method_name", "=", "observer", ".", "__name__", "key", "=", "(", "id", "(", "inst", ")", ",", "met...
Construct a unique, hashable, immutable key for an observer.
[ "Construct", "a", "unique", "hashable", "immutable", "key", "for", "an", "observer", "." ]
train
https://github.com/DanielSank/observed/blob/00b624b90dcff84891d4b5838899f466d884c52e/observed.py#L303-L312
Infinidat/infi.docopt_completion
src/infi/docopt_completion/common.py
build_command_tree
def build_command_tree(pattern, cmd_params): """ Recursively fill in a command tree in cmd_params according to a docopt-parsed "pattern" object. """ from docopt import Either, Optional, OneOrMore, Required, Option, Command, Argument if type(pattern) in [Either, Optional, OneOrMore]: for child in pattern.children: build_command_tree(child, cmd_params) elif type(pattern) in [Required]: for child in pattern.children: cmd_params = build_command_tree(child, cmd_params) elif type(pattern) in [Option]: suffix = "=" if pattern.argcount else "" if pattern.short: cmd_params.options.append(pattern.short + suffix) if pattern.long: cmd_params.options.append(pattern.long + suffix) elif type(pattern) in [Command]: cmd_params = cmd_params.get_subcommand(pattern.name) elif type(pattern) in [Argument]: cmd_params.arguments.append(pattern.name) return cmd_params
python
def build_command_tree(pattern, cmd_params): """ Recursively fill in a command tree in cmd_params according to a docopt-parsed "pattern" object. """ from docopt import Either, Optional, OneOrMore, Required, Option, Command, Argument if type(pattern) in [Either, Optional, OneOrMore]: for child in pattern.children: build_command_tree(child, cmd_params) elif type(pattern) in [Required]: for child in pattern.children: cmd_params = build_command_tree(child, cmd_params) elif type(pattern) in [Option]: suffix = "=" if pattern.argcount else "" if pattern.short: cmd_params.options.append(pattern.short + suffix) if pattern.long: cmd_params.options.append(pattern.long + suffix) elif type(pattern) in [Command]: cmd_params = cmd_params.get_subcommand(pattern.name) elif type(pattern) in [Argument]: cmd_params.arguments.append(pattern.name) return cmd_params
[ "def", "build_command_tree", "(", "pattern", ",", "cmd_params", ")", ":", "from", "docopt", "import", "Either", ",", "Optional", ",", "OneOrMore", ",", "Required", ",", "Option", ",", "Command", ",", "Argument", "if", "type", "(", "pattern", ")", "in", "["...
Recursively fill in a command tree in cmd_params according to a docopt-parsed "pattern" object.
[ "Recursively", "fill", "in", "a", "command", "tree", "in", "cmd_params", "according", "to", "a", "docopt", "-", "parsed", "pattern", "object", "." ]
train
https://github.com/Infinidat/infi.docopt_completion/blob/9e53bc360b903e0a3910adcc379f26e319fef4a4/src/infi/docopt_completion/common.py#L12-L33
workforce-data-initiative/skills-utils
skills_utils/iteration.py
Batch.group
def group(self): """Yield a group from the iterable""" yield self.current # start enumerate at 1 because we already yielded the last saved item for num, item in enumerate(self.iterator, 1): self.current = item if num == self.limit: break yield item else: self.on_going = False
python
def group(self): """Yield a group from the iterable""" yield self.current # start enumerate at 1 because we already yielded the last saved item for num, item in enumerate(self.iterator, 1): self.current = item if num == self.limit: break yield item else: self.on_going = False
[ "def", "group", "(", "self", ")", ":", "yield", "self", ".", "current", "# start enumerate at 1 because we already yielded the last saved item", "for", "num", ",", "item", "in", "enumerate", "(", "self", ".", "iterator", ",", "1", ")", ":", "self", ".", "current...
Yield a group from the iterable
[ "Yield", "a", "group", "from", "the", "iterable" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/iteration.py#L23-L33
devopshq/crosspm
crosspm/helpers/locker.py
Locker.lock_packages
def lock_packages(self, deps_file_path=None, depslock_file_path=None, packages=None): """ Lock packages. Downloader search packages """ if deps_file_path is None: deps_file_path = self._deps_path if depslock_file_path is None: depslock_file_path = self._depslock_path if deps_file_path == depslock_file_path: depslock_file_path += '.lock' # raise CrosspmException( # CROSSPM_ERRORCODE_WRONG_ARGS, # 'Dependencies and Lock files are same: "{}".'.format(deps_file_path), # ) if packages is None: self.search_dependencies(deps_file_path) else: self._root_package.packages = packages self._log.info('Writing lock file [{}]'.format(depslock_file_path)) output_params = { 'out_format': 'lock', 'output': depslock_file_path, } Output(config=self._config).write_output(output_params, self._root_package.packages) self._log.info('Done!')
python
def lock_packages(self, deps_file_path=None, depslock_file_path=None, packages=None): """ Lock packages. Downloader search packages """ if deps_file_path is None: deps_file_path = self._deps_path if depslock_file_path is None: depslock_file_path = self._depslock_path if deps_file_path == depslock_file_path: depslock_file_path += '.lock' # raise CrosspmException( # CROSSPM_ERRORCODE_WRONG_ARGS, # 'Dependencies and Lock files are same: "{}".'.format(deps_file_path), # ) if packages is None: self.search_dependencies(deps_file_path) else: self._root_package.packages = packages self._log.info('Writing lock file [{}]'.format(depslock_file_path)) output_params = { 'out_format': 'lock', 'output': depslock_file_path, } Output(config=self._config).write_output(output_params, self._root_package.packages) self._log.info('Done!')
[ "def", "lock_packages", "(", "self", ",", "deps_file_path", "=", "None", ",", "depslock_file_path", "=", "None", ",", "packages", "=", "None", ")", ":", "if", "deps_file_path", "is", "None", ":", "deps_file_path", "=", "self", ".", "_deps_path", "if", "depsl...
Lock packages. Downloader search packages
[ "Lock", "packages", ".", "Downloader", "search", "packages" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/locker.py#L27-L54
devopshq/crosspm
crosspm/cpm.py
CrossPM.stdout
def stdout(self): """ Флаг --stdout может быть взят из переменной окружения CROSSPM_STDOUT. Если есть любое значение в CROSSPM_STDOUT - оно понимается как True :return: """ # --stdout stdout = self._args['--stdout'] if stdout: return True # CROSSPM_STDOUT stdout_env = os.getenv('CROSSPM_STDOUT', None) if stdout_env is not None: return True return False
python
def stdout(self): """ Флаг --stdout может быть взят из переменной окружения CROSSPM_STDOUT. Если есть любое значение в CROSSPM_STDOUT - оно понимается как True :return: """ # --stdout stdout = self._args['--stdout'] if stdout: return True # CROSSPM_STDOUT stdout_env = os.getenv('CROSSPM_STDOUT', None) if stdout_env is not None: return True return False
[ "def", "stdout", "(", "self", ")", ":", "# --stdout", "stdout", "=", "self", ".", "_args", "[", "'--stdout'", "]", "if", "stdout", ":", "return", "True", "# CROSSPM_STDOUT", "stdout_env", "=", "os", ".", "getenv", "(", "'CROSSPM_STDOUT'", ",", "None", ")",...
Флаг --stdout может быть взят из переменной окружения CROSSPM_STDOUT. Если есть любое значение в CROSSPM_STDOUT - оно понимается как True :return:
[ "Флаг", "--", "stdout", "может", "быть", "взят", "из", "переменной", "окружения", "CROSSPM_STDOUT", ".", "Если", "есть", "любое", "значение", "в", "CROSSPM_STDOUT", "-", "оно", "понимается", "как", "True", ":", "return", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/cpm.py#L144-L160
devopshq/crosspm
crosspm/cpm.py
CrossPM.prepare_args
def prepare_args(args, windows=None): """ Prepare args - add support for old interface, e.g: - --recursive was "flag" and for now it support True or False value :param args: :return: """ if windows is None: windows = "win" in sys.platform if isinstance(args, str): args = shlex.split(args, posix=not windows) elif isinstance(args, list): pass elif args is None: args = sys.argv[1:] else: raise Exception("Unknown args type: {}".format(type(args))) # --recursive => --recursive=True|False convert for position, argument in enumerate(args): # Normal way, skip change if argument.lower() in ('--recursive=true', '--recursive=false'): return args elif argument.lower() == '--recursive': if len(args) > position + 1 and args[position + 1].lower() in ["true", "false"]: # --recursive true | false return args else: # legacy way, convert --recursive to --recursive=true args[position] = "--recursive=True" return args return args
python
def prepare_args(args, windows=None): """ Prepare args - add support for old interface, e.g: - --recursive was "flag" and for now it support True or False value :param args: :return: """ if windows is None: windows = "win" in sys.platform if isinstance(args, str): args = shlex.split(args, posix=not windows) elif isinstance(args, list): pass elif args is None: args = sys.argv[1:] else: raise Exception("Unknown args type: {}".format(type(args))) # --recursive => --recursive=True|False convert for position, argument in enumerate(args): # Normal way, skip change if argument.lower() in ('--recursive=true', '--recursive=false'): return args elif argument.lower() == '--recursive': if len(args) > position + 1 and args[position + 1].lower() in ["true", "false"]: # --recursive true | false return args else: # legacy way, convert --recursive to --recursive=true args[position] = "--recursive=True" return args return args
[ "def", "prepare_args", "(", "args", ",", "windows", "=", "None", ")", ":", "if", "windows", "is", "None", ":", "windows", "=", "\"win\"", "in", "sys", ".", "platform", "if", "isinstance", "(", "args", ",", "str", ")", ":", "args", "=", "shlex", ".", ...
Prepare args - add support for old interface, e.g: - --recursive was "flag" and for now it support True or False value :param args: :return:
[ "Prepare", "args", "-", "add", "support", "for", "old", "interface", "e", ".", "g", ":", "-", "--", "recursive", "was", "flag", "and", "for", "now", "it", "support", "True", "or", "False", "value", ":", "param", "args", ":", ":", "return", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/cpm.py#L163-L196
bachya/regenmaschine
regenmaschine/watering.py
Watering.log
async def log( self, date: datetime.date = None, days: int = None, details: bool = False) -> list: """Get watering information for X days from Y date.""" endpoint = 'watering/log' if details: endpoint += '/details' if date and days: endpoint = '{0}/{1}/{2}'.format( endpoint, date.strftime('%Y-%m-%d'), days) data = await self._request('get', endpoint) return data['waterLog']['days']
python
async def log( self, date: datetime.date = None, days: int = None, details: bool = False) -> list: """Get watering information for X days from Y date.""" endpoint = 'watering/log' if details: endpoint += '/details' if date and days: endpoint = '{0}/{1}/{2}'.format( endpoint, date.strftime('%Y-%m-%d'), days) data = await self._request('get', endpoint) return data['waterLog']['days']
[ "async", "def", "log", "(", "self", ",", "date", ":", "datetime", ".", "date", "=", "None", ",", "days", ":", "int", "=", "None", ",", "details", ":", "bool", "=", "False", ")", "->", "list", ":", "endpoint", "=", "'watering/log'", "if", "details", ...
Get watering information for X days from Y date.
[ "Get", "watering", "information", "for", "X", "days", "from", "Y", "date", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/watering.py#L13-L28
bachya/regenmaschine
regenmaschine/watering.py
Watering.runs
async def runs(self, date: datetime.date = None, days: int = None) -> list: """Return all program runs for X days from Y date.""" endpoint = 'watering/past' if date and days: endpoint = '{0}/{1}/{2}'.format( endpoint, date.strftime('%Y-%m-%d'), days) data = await self._request('get', endpoint) return data['pastValues']
python
async def runs(self, date: datetime.date = None, days: int = None) -> list: """Return all program runs for X days from Y date.""" endpoint = 'watering/past' if date and days: endpoint = '{0}/{1}/{2}'.format( endpoint, date.strftime('%Y-%m-%d'), days) data = await self._request('get', endpoint) return data['pastValues']
[ "async", "def", "runs", "(", "self", ",", "date", ":", "datetime", ".", "date", "=", "None", ",", "days", ":", "int", "=", "None", ")", "->", "list", ":", "endpoint", "=", "'watering/past'", "if", "date", "and", "days", ":", "endpoint", "=", "'{0}/{1...
Return all program runs for X days from Y date.
[ "Return", "all", "program", "runs", "for", "X", "days", "from", "Y", "date", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/watering.py#L40-L49
devopshq/crosspm
crosspm/adapters/artifactoryaql.py
Adapter.search_auth
def search_auth(self, list_or_file_path, source): """ Looking for auth in env, cmdline, str :param list_or_file_path: :param source: """ _auth = source.args['auth'] if isinstance(_auth, str): if ':' in _auth: _auth = _auth.split(':') elif _auth.endswith('}') and ( _auth.startswith('{') or ':' in _auth): # {auth}, {user}:{password}, user:{password} _auth = self.get_auth(list_or_file_path, _auth) _auth = self.split_auth(_auth) if isinstance(_auth, list): for i in range(len(_auth)): if _auth[i].endswith('}') and ( _auth[i].startswith('{') or ':' in _auth[i]): # {auth}, {user}:{password}, user:{password} _auth[i] = self.get_auth(list_or_file_path, _auth[i]) if ':' in _auth[i]: _auth = self.split_auth(_auth[i]) source.args['auth'] = _auth
python
def search_auth(self, list_or_file_path, source): """ Looking for auth in env, cmdline, str :param list_or_file_path: :param source: """ _auth = source.args['auth'] if isinstance(_auth, str): if ':' in _auth: _auth = _auth.split(':') elif _auth.endswith('}') and ( _auth.startswith('{') or ':' in _auth): # {auth}, {user}:{password}, user:{password} _auth = self.get_auth(list_or_file_path, _auth) _auth = self.split_auth(_auth) if isinstance(_auth, list): for i in range(len(_auth)): if _auth[i].endswith('}') and ( _auth[i].startswith('{') or ':' in _auth[i]): # {auth}, {user}:{password}, user:{password} _auth[i] = self.get_auth(list_or_file_path, _auth[i]) if ':' in _auth[i]: _auth = self.split_auth(_auth[i]) source.args['auth'] = _auth
[ "def", "search_auth", "(", "self", ",", "list_or_file_path", ",", "source", ")", ":", "_auth", "=", "source", ".", "args", "[", "'auth'", "]", "if", "isinstance", "(", "_auth", ",", "str", ")", ":", "if", "':'", "in", "_auth", ":", "_auth", "=", "_au...
Looking for auth in env, cmdline, str :param list_or_file_path: :param source:
[ "Looking", "for", "auth", "in", "env", "cmdline", "str", ":", "param", "list_or_file_path", ":", ":", "param", "source", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/adapters/artifactoryaql.py#L283-L306
bachya/regenmaschine
regenmaschine/program.py
Program._post
async def _post(self, program_id: int = None, json: dict = None) -> dict: """Post data to a (non)existing program.""" return await self._request( 'post', 'program/{0}'.format(program_id), json=json)
python
async def _post(self, program_id: int = None, json: dict = None) -> dict: """Post data to a (non)existing program.""" return await self._request( 'post', 'program/{0}'.format(program_id), json=json)
[ "async", "def", "_post", "(", "self", ",", "program_id", ":", "int", "=", "None", ",", "json", ":", "dict", "=", "None", ")", "->", "dict", ":", "return", "await", "self", ".", "_request", "(", "'post'", ",", "'program/{0}'", ".", "format", "(", "pro...
Post data to a (non)existing program.
[ "Post", "data", "to", "a", "(", "non", ")", "existing", "program", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/program.py#L12-L15
bachya/regenmaschine
regenmaschine/program.py
Program.all
async def all(self, include_inactive: bool = False) -> list: """Return all programs.""" data = await self._request('get', 'program') return [p for p in data['programs'] if include_inactive or p['active']]
python
async def all(self, include_inactive: bool = False) -> list: """Return all programs.""" data = await self._request('get', 'program') return [p for p in data['programs'] if include_inactive or p['active']]
[ "async", "def", "all", "(", "self", ",", "include_inactive", ":", "bool", "=", "False", ")", "->", "list", ":", "data", "=", "await", "self", ".", "_request", "(", "'get'", ",", "'program'", ")", "return", "[", "p", "for", "p", "in", "data", "[", "...
Return all programs.
[ "Return", "all", "programs", "." ]
train
https://github.com/bachya/regenmaschine/blob/99afb648fe454dc4a7d5db85a02a8b3b5d26f8bc/regenmaschine/program.py#L17-L20
workforce-data-initiative/skills-utils
skills_utils/fs.py
cache_json
def cache_json(filename): """Caches the JSON-serializable output of the function to a given file Args: filename (str) The filename (sans directory) to store the output Returns: decorator, applicable to a function that produces JSON-serializable output """ def cache_decorator(cacheable_function): @wraps(cacheable_function) def cache_wrapper(*args, **kwargs): path = CACHE_DIRECTORY + filename check_create_folder(path) if os.path.exists(path): with open(path) as infile: return json.load(infile) else: function_output = cacheable_function(*args, **kwargs) with open(path, 'w') as outfile: json.dump(function_output, outfile) return function_output return cache_wrapper return cache_decorator
python
def cache_json(filename): """Caches the JSON-serializable output of the function to a given file Args: filename (str) The filename (sans directory) to store the output Returns: decorator, applicable to a function that produces JSON-serializable output """ def cache_decorator(cacheable_function): @wraps(cacheable_function) def cache_wrapper(*args, **kwargs): path = CACHE_DIRECTORY + filename check_create_folder(path) if os.path.exists(path): with open(path) as infile: return json.load(infile) else: function_output = cacheable_function(*args, **kwargs) with open(path, 'w') as outfile: json.dump(function_output, outfile) return function_output return cache_wrapper return cache_decorator
[ "def", "cache_json", "(", "filename", ")", ":", "def", "cache_decorator", "(", "cacheable_function", ")", ":", "@", "wraps", "(", "cacheable_function", ")", "def", "cache_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "path", "=", "CACHE_DI...
Caches the JSON-serializable output of the function to a given file Args: filename (str) The filename (sans directory) to store the output Returns: decorator, applicable to a function that produces JSON-serializable output
[ "Caches", "the", "JSON", "-", "serializable", "output", "of", "the", "function", "to", "a", "given", "file" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/fs.py#L10-L32
workforce-data-initiative/skills-utils
skills_utils/fs.py
check_create_folder
def check_create_folder(filename): """Check if the folder exisits. If not, create the folder""" os.makedirs(os.path.dirname(filename), exist_ok=True)
python
def check_create_folder(filename): """Check if the folder exisits. If not, create the folder""" os.makedirs(os.path.dirname(filename), exist_ok=True)
[ "def", "check_create_folder", "(", "filename", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ",", "exist_ok", "=", "True", ")" ]
Check if the folder exisits. If not, create the folder
[ "Check", "if", "the", "folder", "exisits", ".", "If", "not", "create", "the", "folder" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/fs.py#L35-L37
awesto/djangoshop-paypal
shop_paypal/payment.py
PayPalPayment.get_payment_request
def get_payment_request(self, cart, request): """ From the given request, redirect onto the checkout view, hosted by PayPal. """ shop_ns = resolve(request.path).namespace return_url = reverse('{}:{}:return'.format(shop_ns, self.namespace)) cancel_url = reverse('{}:{}:cancel'.format(shop_ns, self.namespace)) cart = CartModel.objects.get_from_request(request) cart.update(request) # to calculate the total auth_token_hash = self.get_auth_token() payload = { 'url': '{API_ENDPOINT}/v1/payments/payment'.format(**settings.SHOP_PAYPAL), 'method': 'POST', 'headers': { 'Content-Type': 'application/json', 'Authorization': '{token_type} {access_token}'.format(**auth_token_hash), }, 'data': { 'intent': 'sale', 'redirect_urls': { 'return_url': request.build_absolute_uri(return_url), 'cancel_url': request.build_absolute_uri(cancel_url), }, 'payer': { 'payment_method': 'paypal', }, 'transactions': [{ 'amount': { 'total': cart.total.as_decimal(), 'currency': cart.total.currency, } }] } } config = json.dumps(payload, cls=DjangoJSONEncoder) success_handler = """ function successCallback(r) { console.log(r); $window.location.href=r.data.links.filter(function(e){ return e.rel==='approval_url'; })[0].href; }""".replace(' ', '').replace('\n', '') error_handler = """ function errorCallback(r) { console.error(r); }""".replace(' ', '').replace('\n', '') js_expression = '$http({0}).then({1},{2})'.format(config, success_handler, error_handler) return js_expression
python
def get_payment_request(self, cart, request): """ From the given request, redirect onto the checkout view, hosted by PayPal. """ shop_ns = resolve(request.path).namespace return_url = reverse('{}:{}:return'.format(shop_ns, self.namespace)) cancel_url = reverse('{}:{}:cancel'.format(shop_ns, self.namespace)) cart = CartModel.objects.get_from_request(request) cart.update(request) # to calculate the total auth_token_hash = self.get_auth_token() payload = { 'url': '{API_ENDPOINT}/v1/payments/payment'.format(**settings.SHOP_PAYPAL), 'method': 'POST', 'headers': { 'Content-Type': 'application/json', 'Authorization': '{token_type} {access_token}'.format(**auth_token_hash), }, 'data': { 'intent': 'sale', 'redirect_urls': { 'return_url': request.build_absolute_uri(return_url), 'cancel_url': request.build_absolute_uri(cancel_url), }, 'payer': { 'payment_method': 'paypal', }, 'transactions': [{ 'amount': { 'total': cart.total.as_decimal(), 'currency': cart.total.currency, } }] } } config = json.dumps(payload, cls=DjangoJSONEncoder) success_handler = """ function successCallback(r) { console.log(r); $window.location.href=r.data.links.filter(function(e){ return e.rel==='approval_url'; })[0].href; }""".replace(' ', '').replace('\n', '') error_handler = """ function errorCallback(r) { console.error(r); }""".replace(' ', '').replace('\n', '') js_expression = '$http({0}).then({1},{2})'.format(config, success_handler, error_handler) return js_expression
[ "def", "get_payment_request", "(", "self", ",", "cart", ",", "request", ")", ":", "shop_ns", "=", "resolve", "(", "request", ".", "path", ")", ".", "namespace", "return_url", "=", "reverse", "(", "'{}:{}:return'", ".", "format", "(", "shop_ns", ",", "self"...
From the given request, redirect onto the checkout view, hosted by PayPal.
[ "From", "the", "given", "request", "redirect", "onto", "the", "checkout", "view", "hosted", "by", "PayPal", "." ]
train
https://github.com/awesto/djangoshop-paypal/blob/d1db892304869fc9b182404db1d6ef214fafe2a7/shop_paypal/payment.py#L48-L95
workforce-data-initiative/skills-utils
skills_utils/job_posting_import.py
JobPostingImportBase.postings
def postings(self, quarter, stats_counter=None): """Yield job postings in common schema format Args: quarter (str) The quarter, in format '2015Q1' stats_counter (object, optional) A counter that can track both input and output documents using a 'track' method. """ logging.info('Finding postings for %s', quarter) for posting in self._iter_postings(quarter): transformed = self._transform(posting) transformed['id'] = '{}_{}'.format( self.partner_id, self._id(posting) ) if stats_counter: stats_counter.track( input_document=posting, output_document=transformed ) yield transformed
python
def postings(self, quarter, stats_counter=None): """Yield job postings in common schema format Args: quarter (str) The quarter, in format '2015Q1' stats_counter (object, optional) A counter that can track both input and output documents using a 'track' method. """ logging.info('Finding postings for %s', quarter) for posting in self._iter_postings(quarter): transformed = self._transform(posting) transformed['id'] = '{}_{}'.format( self.partner_id, self._id(posting) ) if stats_counter: stats_counter.track( input_document=posting, output_document=transformed ) yield transformed
[ "def", "postings", "(", "self", ",", "quarter", ",", "stats_counter", "=", "None", ")", ":", "logging", ".", "info", "(", "'Finding postings for %s'", ",", "quarter", ")", "for", "posting", "in", "self", ".", "_iter_postings", "(", "quarter", ")", ":", "tr...
Yield job postings in common schema format Args: quarter (str) The quarter, in format '2015Q1' stats_counter (object, optional) A counter that can track both input and output documents using a 'track' method.
[ "Yield", "job", "postings", "in", "common", "schema", "format" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/job_posting_import.py#L21-L41
mgrijalva/nose2-html-report
nose2_html_report/html_report.py
HTMLReporter._generate_search_terms
def _generate_search_terms(self): """ Map search terms to what test case(s) they're related to Returns: dict: maps search terms to what test case(s) it's relevant to Example: { '12034': ['ui.tests.TestSomething.test_hello_world'], 'buggy': ['ui.tests.TestSomething.test_hello_world', 'ui.tests.TestSomething.buggy_test_case'], 'ui.tests.TestAnother.test_fail': ['ui.tests.TestAnother.test_fail'] } """ search_terms = {} for test_result in self.test_results: # search for the test name itself maps to the test case search_terms[test_result['name']] = test_result['name'] if test_result['description']: for token in test_result['description'].split(): if token in search_terms: search_terms[token].append(test_result['name']) else: search_terms[token] = [test_result['name']] return search_terms
python
def _generate_search_terms(self): """ Map search terms to what test case(s) they're related to Returns: dict: maps search terms to what test case(s) it's relevant to Example: { '12034': ['ui.tests.TestSomething.test_hello_world'], 'buggy': ['ui.tests.TestSomething.test_hello_world', 'ui.tests.TestSomething.buggy_test_case'], 'ui.tests.TestAnother.test_fail': ['ui.tests.TestAnother.test_fail'] } """ search_terms = {} for test_result in self.test_results: # search for the test name itself maps to the test case search_terms[test_result['name']] = test_result['name'] if test_result['description']: for token in test_result['description'].split(): if token in search_terms: search_terms[token].append(test_result['name']) else: search_terms[token] = [test_result['name']] return search_terms
[ "def", "_generate_search_terms", "(", "self", ")", ":", "search_terms", "=", "{", "}", "for", "test_result", "in", "self", ".", "test_results", ":", "# search for the test name itself maps to the test case", "search_terms", "[", "test_result", "[", "'name'", "]", "]",...
Map search terms to what test case(s) they're related to Returns: dict: maps search terms to what test case(s) it's relevant to Example: { '12034': ['ui.tests.TestSomething.test_hello_world'], 'buggy': ['ui.tests.TestSomething.test_hello_world', 'ui.tests.TestSomething.buggy_test_case'], 'ui.tests.TestAnother.test_fail': ['ui.tests.TestAnother.test_fail'] }
[ "Map", "search", "terms", "to", "what", "test", "case", "(", "s", ")", "they", "re", "related", "to" ]
train
https://github.com/mgrijalva/nose2-html-report/blob/e8c89bedec5d5fe085b65d399cf1fe6647a3c179/nose2_html_report/html_report.py#L33-L60
mgrijalva/nose2-html-report
nose2_html_report/html_report.py
HTMLReporter.afterSummaryReport
def afterSummaryReport(self, event): """ After everything is done, generate the report """ logger.info('Generating HTML report...') sorted_test_results = self._sort_test_results() context = { 'test_report_title': 'Test Report', 'test_summary': self.summary_stats, 'test_results': sorted_test_results, 'autocomplete_terms': json.dumps(self._generate_search_terms()), 'timestamp': datetime.utcnow().strftime('%Y/%m/%d %H:%M:%S UTC') } template = load_template(self._config['template']) rendered_template = render_template(template, context) with open(self._config['report_path'], 'w') as template_file: template_file.write(rendered_template)
python
def afterSummaryReport(self, event): """ After everything is done, generate the report """ logger.info('Generating HTML report...') sorted_test_results = self._sort_test_results() context = { 'test_report_title': 'Test Report', 'test_summary': self.summary_stats, 'test_results': sorted_test_results, 'autocomplete_terms': json.dumps(self._generate_search_terms()), 'timestamp': datetime.utcnow().strftime('%Y/%m/%d %H:%M:%S UTC') } template = load_template(self._config['template']) rendered_template = render_template(template, context) with open(self._config['report_path'], 'w') as template_file: template_file.write(rendered_template)
[ "def", "afterSummaryReport", "(", "self", ",", "event", ")", ":", "logger", ".", "info", "(", "'Generating HTML report...'", ")", "sorted_test_results", "=", "self", ".", "_sort_test_results", "(", ")", "context", "=", "{", "'test_report_title'", ":", "'Test Repor...
After everything is done, generate the report
[ "After", "everything", "is", "done", "generate", "the", "report" ]
train
https://github.com/mgrijalva/nose2-html-report/blob/e8c89bedec5d5fe085b65d399cf1fe6647a3c179/nose2_html_report/html_report.py#L97-L115
workforce-data-initiative/skills-utils
skills_utils/time.py
quarter_to_daterange
def quarter_to_daterange(quarter): """Convert a quarter in arbitrary filename-ready format (e.g. 2015Q1) into start and end datetimes""" assert len(quarter) == 6 year = int(quarter[0:4]) quarter = quarter[5] MONTH_DAY = { '1': ((1, 1), (3, 31)), '2': ((4, 1), (6, 30)), '3': ((7, 1), (9, 30)), '4': ((10, 1), (12, 31)) } md = MONTH_DAY[quarter] start_md, end_md = md return ( date(year, *start_md), date(year, *end_md) )
python
def quarter_to_daterange(quarter): """Convert a quarter in arbitrary filename-ready format (e.g. 2015Q1) into start and end datetimes""" assert len(quarter) == 6 year = int(quarter[0:4]) quarter = quarter[5] MONTH_DAY = { '1': ((1, 1), (3, 31)), '2': ((4, 1), (6, 30)), '3': ((7, 1), (9, 30)), '4': ((10, 1), (12, 31)) } md = MONTH_DAY[quarter] start_md, end_md = md return ( date(year, *start_md), date(year, *end_md) )
[ "def", "quarter_to_daterange", "(", "quarter", ")", ":", "assert", "len", "(", "quarter", ")", "==", "6", "year", "=", "int", "(", "quarter", "[", "0", ":", "4", "]", ")", "quarter", "=", "quarter", "[", "5", "]", "MONTH_DAY", "=", "{", "'1'", ":",...
Convert a quarter in arbitrary filename-ready format (e.g. 2015Q1) into start and end datetimes
[ "Convert", "a", "quarter", "in", "arbitrary", "filename", "-", "ready", "format", "(", "e", ".", "g", ".", "2015Q1", ")", "into", "start", "and", "end", "datetimes" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/time.py#L6-L23
workforce-data-initiative/skills-utils
skills_utils/time.py
datetime_to_year_quarter
def datetime_to_year_quarter(dt): """ Args: dt: a datetime Returns: tuple of the datetime's year and quarter """ year = dt.year quarter = int(math.ceil(float(dt.month)/3)) return (year, quarter)
python
def datetime_to_year_quarter(dt): """ Args: dt: a datetime Returns: tuple of the datetime's year and quarter """ year = dt.year quarter = int(math.ceil(float(dt.month)/3)) return (year, quarter)
[ "def", "datetime_to_year_quarter", "(", "dt", ")", ":", "year", "=", "dt", ".", "year", "quarter", "=", "int", "(", "math", ".", "ceil", "(", "float", "(", "dt", ".", "month", ")", "/", "3", ")", ")", "return", "(", "year", ",", "quarter", ")" ]
Args: dt: a datetime Returns: tuple of the datetime's year and quarter
[ "Args", ":", "dt", ":", "a", "datetime", "Returns", ":", "tuple", "of", "the", "datetime", "s", "year", "and", "quarter" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/time.py#L26-L35
workforce-data-initiative/skills-utils
skills_utils/time.py
dates_in_range
def dates_in_range(start_date, end_date): """Returns all dates between two dates. Inclusive of the start date but not the end date. Args: start_date (datetime.date) end_date (datetime.date) Returns: (list) of datetime.date objects """ return [ start_date + timedelta(n) for n in range(int((end_date - start_date).days)) ]
python
def dates_in_range(start_date, end_date): """Returns all dates between two dates. Inclusive of the start date but not the end date. Args: start_date (datetime.date) end_date (datetime.date) Returns: (list) of datetime.date objects """ return [ start_date + timedelta(n) for n in range(int((end_date - start_date).days)) ]
[ "def", "dates_in_range", "(", "start_date", ",", "end_date", ")", ":", "return", "[", "start_date", "+", "timedelta", "(", "n", ")", "for", "n", "in", "range", "(", "int", "(", "(", "end_date", "-", "start_date", ")", ".", "days", ")", ")", "]" ]
Returns all dates between two dates. Inclusive of the start date but not the end date. Args: start_date (datetime.date) end_date (datetime.date) Returns: (list) of datetime.date objects
[ "Returns", "all", "dates", "between", "two", "dates", "." ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/time.py#L53-L68
ColinDuquesnoy/pyqt_distutils
pyqt_distutils/hooks.py
load_hooks
def load_hooks(): """ Load the exposed hooks. Returns a dict of hooks where the keys are the name of the hook and the values are the actual hook functions. """ hooks = {} for entrypoint in pkg_resources.iter_entry_points(ENTRYPOINT): name = str(entrypoint).split('=')[0].strip() try: hook = entrypoint.load() except Exception as e: write_message('failed to load entry-point %r (error="%s")' % (name, e), 'yellow') else: hooks[name] = hook return hooks
python
def load_hooks(): """ Load the exposed hooks. Returns a dict of hooks where the keys are the name of the hook and the values are the actual hook functions. """ hooks = {} for entrypoint in pkg_resources.iter_entry_points(ENTRYPOINT): name = str(entrypoint).split('=')[0].strip() try: hook = entrypoint.load() except Exception as e: write_message('failed to load entry-point %r (error="%s")' % (name, e), 'yellow') else: hooks[name] = hook return hooks
[ "def", "load_hooks", "(", ")", ":", "hooks", "=", "{", "}", "for", "entrypoint", "in", "pkg_resources", ".", "iter_entry_points", "(", "ENTRYPOINT", ")", ":", "name", "=", "str", "(", "entrypoint", ")", ".", "split", "(", "'='", ")", "[", "0", "]", "...
Load the exposed hooks. Returns a dict of hooks where the keys are the name of the hook and the values are the actual hook functions.
[ "Load", "the", "exposed", "hooks", "." ]
train
https://github.com/ColinDuquesnoy/pyqt_distutils/blob/7387d64ea2db3b1dafb09d006266cec580131f7d/pyqt_distutils/hooks.py#L15-L31
ColinDuquesnoy/pyqt_distutils
pyqt_distutils/hooks.py
gettext
def gettext(ui_file_path): """ Let you use gettext instead of the Qt tools for l18n """ with open(ui_file_path, 'r') as fin: content = fin.read() # replace ``_translate("context", `` by ``_(`` content = re.sub(r'_translate\(".*",\s', '_(', content) content = content.replace( ' _translate = QtCore.QCoreApplication.translate', '') with open(ui_file_path, 'w') as fout: fout.write(content)
python
def gettext(ui_file_path): """ Let you use gettext instead of the Qt tools for l18n """ with open(ui_file_path, 'r') as fin: content = fin.read() # replace ``_translate("context", `` by ``_(`` content = re.sub(r'_translate\(".*",\s', '_(', content) content = content.replace( ' _translate = QtCore.QCoreApplication.translate', '') with open(ui_file_path, 'w') as fout: fout.write(content)
[ "def", "gettext", "(", "ui_file_path", ")", ":", "with", "open", "(", "ui_file_path", ",", "'r'", ")", "as", "fin", ":", "content", "=", "fin", ".", "read", "(", ")", "# replace ``_translate(\"context\", `` by ``_(``", "content", "=", "re", ".", "sub", "(", ...
Let you use gettext instead of the Qt tools for l18n
[ "Let", "you", "use", "gettext", "instead", "of", "the", "Qt", "tools", "for", "l18n" ]
train
https://github.com/ColinDuquesnoy/pyqt_distutils/blob/7387d64ea2db3b1dafb09d006266cec580131f7d/pyqt_distutils/hooks.py#L41-L54
workforce-data-initiative/skills-utils
skills_utils/io.py
stream_json_file
def stream_json_file(local_file): """Stream a JSON file (in JSON-per-line format) Args: local_file (file-like object) an open file-handle that contains a JSON string on each line Yields: (dict) JSON objects """ for i, line in enumerate(local_file): try: data = json.loads(line.decode('utf-8')) yield data except ValueError as e: logging.warning("Skipping line %d due to error: %s", i, e) continue
python
def stream_json_file(local_file): """Stream a JSON file (in JSON-per-line format) Args: local_file (file-like object) an open file-handle that contains a JSON string on each line Yields: (dict) JSON objects """ for i, line in enumerate(local_file): try: data = json.loads(line.decode('utf-8')) yield data except ValueError as e: logging.warning("Skipping line %d due to error: %s", i, e) continue
[ "def", "stream_json_file", "(", "local_file", ")", ":", "for", "i", ",", "line", "in", "enumerate", "(", "local_file", ")", ":", "try", ":", "data", "=", "json", ".", "loads", "(", "line", ".", "decode", "(", "'utf-8'", ")", ")", "yield", "data", "ex...
Stream a JSON file (in JSON-per-line format) Args: local_file (file-like object) an open file-handle that contains a JSON string on each line Yields: (dict) JSON objects
[ "Stream", "a", "JSON", "file", "(", "in", "JSON", "-", "per", "-", "line", "format", ")" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/io.py#L6-L21
devopshq/crosspm
crosspm/helpers/package.py
Package.download
def download(self, force=False): """ Download file containing this package. :param force: Force download even if it seems file already exists :return: Full path with filename of downloaded package file. """ exists, dest_path = self._downloader.cache.exists_packed(package=self, pkg_path=self.packed_path, check_stat=not self._in_cache) unp_exists, unp_path = self._downloader.cache.exists_unpacked(package=self, pkg_path=self.unpacked_path) # Если архива нет, то и кешу доверять не стоит if not exists: unp_exists = False if exists and not self.packed_path: self.packed_path = dest_path if force or not exists: # _packed_path = self._packed_path dest_path_tmp = dest_path + ".tmp" if os.path.exists(dest_path_tmp): os.remove(dest_path_tmp) self._adapter.download_package(self._pkg, dest_path_tmp) os.rename(dest_path_tmp, dest_path) self.packed_path = dest_path # if not _packed_path: self._not_cached = True else: if unp_exists and not self.unpacked_path: self.unpacked_path = unp_path self._not_cached = False if self._not_cached and unp_exists: shutil.rmtree(unp_path, ignore_errors=True) return self.packed_path
python
def download(self, force=False): """ Download file containing this package. :param force: Force download even if it seems file already exists :return: Full path with filename of downloaded package file. """ exists, dest_path = self._downloader.cache.exists_packed(package=self, pkg_path=self.packed_path, check_stat=not self._in_cache) unp_exists, unp_path = self._downloader.cache.exists_unpacked(package=self, pkg_path=self.unpacked_path) # Если архива нет, то и кешу доверять не стоит if not exists: unp_exists = False if exists and not self.packed_path: self.packed_path = dest_path if force or not exists: # _packed_path = self._packed_path dest_path_tmp = dest_path + ".tmp" if os.path.exists(dest_path_tmp): os.remove(dest_path_tmp) self._adapter.download_package(self._pkg, dest_path_tmp) os.rename(dest_path_tmp, dest_path) self.packed_path = dest_path # if not _packed_path: self._not_cached = True else: if unp_exists and not self.unpacked_path: self.unpacked_path = unp_path self._not_cached = False if self._not_cached and unp_exists: shutil.rmtree(unp_path, ignore_errors=True) return self.packed_path
[ "def", "download", "(", "self", ",", "force", "=", "False", ")", ":", "exists", ",", "dest_path", "=", "self", ".", "_downloader", ".", "cache", ".", "exists_packed", "(", "package", "=", "self", ",", "pkg_path", "=", "self", ".", "packed_path", ",", "...
Download file containing this package. :param force: Force download even if it seems file already exists :return: Full path with filename of downloaded package file.
[ "Download", "file", "containing", "this", "package", ".", ":", "param", "force", ":", "Force", "download", "even", "if", "it", "seems", "file", "already", "exists", ":", "return", ":", "Full", "path", "with", "filename", "of", "downloaded", "package", "file"...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/package.py#L57-L92
devopshq/crosspm
crosspm/helpers/package.py
Package.find_dependencies
def find_dependencies(self, depslock_file_path, property_validate=True, deps_content=None): """ Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, :param deps_content: HACK for use --dependencies-content and existed dependencies.txt.lock file we can skip validate part :return: """ self._raw = [x for x in self._downloader.common_parser.iter_packages_params(depslock_file_path, deps_content=deps_content)] self.packages = self._downloader.get_dependency_packages({'raw': self._raw}, property_validate=property_validate)
python
def find_dependencies(self, depslock_file_path, property_validate=True, deps_content=None): """ Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, :param deps_content: HACK for use --dependencies-content and existed dependencies.txt.lock file we can skip validate part :return: """ self._raw = [x for x in self._downloader.common_parser.iter_packages_params(depslock_file_path, deps_content=deps_content)] self.packages = self._downloader.get_dependency_packages({'raw': self._raw}, property_validate=property_validate)
[ "def", "find_dependencies", "(", "self", ",", "depslock_file_path", ",", "property_validate", "=", "True", ",", "deps_content", "=", "None", ")", ":", "self", ".", "_raw", "=", "[", "x", "for", "x", "in", "self", ".", "_downloader", ".", "common_parser", "...
Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, :param deps_content: HACK for use --dependencies-content and existed dependencies.txt.lock file we can skip validate part :return:
[ "Find", "all", "dependencies", "by", "package", ":", "param", "depslock_file_path", ":", ":", "param", "property_validate", ":", "for", "root", "packages", "we", "need", "check", "property", "bad", "if", "we", "find", "packages", "from", "lock", "file", ":", ...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/package.py#L105-L117
devopshq/crosspm
crosspm/helpers/package.py
Package.find_usedby
def find_usedby(self, depslock_file_path, property_validate=True): """ Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ if depslock_file_path is None: self._raw = [self._params] self._raw[0]['repo'] = None self._raw[0]['server'] = None else: self._raw = [x for x in self._downloader.common_parser.iter_packages_params(depslock_file_path)] self.packages = self._downloader.get_usedby_packages({'raw': self._raw}, property_validate=property_validate)
python
def find_usedby(self, depslock_file_path, property_validate=True): """ Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ if depslock_file_path is None: self._raw = [self._params] self._raw[0]['repo'] = None self._raw[0]['server'] = None else: self._raw = [x for x in self._downloader.common_parser.iter_packages_params(depslock_file_path)] self.packages = self._downloader.get_usedby_packages({'raw': self._raw}, property_validate=property_validate)
[ "def", "find_usedby", "(", "self", ",", "depslock_file_path", ",", "property_validate", "=", "True", ")", ":", "if", "depslock_file_path", "is", "None", ":", "self", ".", "_raw", "=", "[", "self", ".", "_params", "]", "self", ".", "_raw", "[", "0", "]", ...
Find all dependencies by package :param depslock_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return:
[ "Find", "all", "dependencies", "by", "package", ":", "param", "depslock_file_path", ":", ":", "param", "property_validate", ":", "for", "root", "packages", "we", "need", "check", "property", "bad", "if", "we", "find", "packages", "from", "lock", "file", "we", ...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/package.py#L119-L134
devopshq/crosspm
crosspm/helpers/package.py
Package.get_params
def get_params(self, param_list=None, get_path=False, merged=False, raw=False): """ Get Package params :param param_list: name or list of parameters :param get_path: :param merged: if version splited, True return version in string :param raw: :return: """ # Convert parameter name to list if param_list and isinstance(param_list, str): param_list = [param_list] if param_list and isinstance(param_list, (list, tuple)): result = {k: v for k, v in self._params_found.items() if k in param_list} result.update({k: v for k, v in self._params.items() if (k in param_list and k not in result)}) else: result = {k: v for k, v in self._params_found.items()} result.update({k: v for k, v in self._params.items() if k not in result}) if get_path: result['path'] = self.unpacked_path if merged: result.update(self._parser.merge_valued(result)) if raw: result.update({k: v for k, v in self._params_found_raw.items()}) return result
python
def get_params(self, param_list=None, get_path=False, merged=False, raw=False): """ Get Package params :param param_list: name or list of parameters :param get_path: :param merged: if version splited, True return version in string :param raw: :return: """ # Convert parameter name to list if param_list and isinstance(param_list, str): param_list = [param_list] if param_list and isinstance(param_list, (list, tuple)): result = {k: v for k, v in self._params_found.items() if k in param_list} result.update({k: v for k, v in self._params.items() if (k in param_list and k not in result)}) else: result = {k: v for k, v in self._params_found.items()} result.update({k: v for k, v in self._params.items() if k not in result}) if get_path: result['path'] = self.unpacked_path if merged: result.update(self._parser.merge_valued(result)) if raw: result.update({k: v for k, v in self._params_found_raw.items()}) return result
[ "def", "get_params", "(", "self", ",", "param_list", "=", "None", ",", "get_path", "=", "False", ",", "merged", "=", "False", ",", "raw", "=", "False", ")", ":", "# Convert parameter name to list", "if", "param_list", "and", "isinstance", "(", "param_list", ...
Get Package params :param param_list: name or list of parameters :param get_path: :param merged: if version splited, True return version in string :param raw: :return:
[ "Get", "Package", "params", ":", "param", "param_list", ":", "name", "or", "list", "of", "parameters", ":", "param", "get_path", ":", ":", "param", "merged", ":", "if", "version", "splited", "True", "return", "version", "in", "string", ":", "param", "raw",...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/package.py#L200-L224
devopshq/crosspm
crosspm/helpers/package.py
Package.get_none_packages
def get_none_packages(self): """ Get packages with None (not founded), recursively """ not_found = set() for package_name, package in self.packages.items(): if package is None: not_found.add(package_name) else: if package.packages: not_found = not_found | package.get_none_packages() return not_found
python
def get_none_packages(self): """ Get packages with None (not founded), recursively """ not_found = set() for package_name, package in self.packages.items(): if package is None: not_found.add(package_name) else: if package.packages: not_found = not_found | package.get_none_packages() return not_found
[ "def", "get_none_packages", "(", "self", ")", ":", "not_found", "=", "set", "(", ")", "for", "package_name", ",", "package", "in", "self", ".", "packages", ".", "items", "(", ")", ":", "if", "package", "is", "None", ":", "not_found", ".", "add", "(", ...
Get packages with None (not founded), recursively
[ "Get", "packages", "with", "None", "(", "not", "founded", ")", "recursively" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/package.py#L235-L246
devopshq/crosspm
crosspm/helpers/parser.py
Parser.get_usedby_aql
def get_usedby_aql(self, params): """ Возвращает запрос AQL (без репозитория), из файла конфигурации :param params: :return: """ if self._usedby is None: return None _result = {} params = self.merge_valued(params) for k, v in self._usedby['AQL'].items(): if isinstance(v, str): k = k.format(**params) v = v.format(**params) _result[k] = v return _result
python
def get_usedby_aql(self, params): """ Возвращает запрос AQL (без репозитория), из файла конфигурации :param params: :return: """ if self._usedby is None: return None _result = {} params = self.merge_valued(params) for k, v in self._usedby['AQL'].items(): if isinstance(v, str): k = k.format(**params) v = v.format(**params) _result[k] = v return _result
[ "def", "get_usedby_aql", "(", "self", ",", "params", ")", ":", "if", "self", ".", "_usedby", "is", "None", ":", "return", "None", "_result", "=", "{", "}", "params", "=", "self", ".", "merge_valued", "(", "params", ")", "for", "k", ",", "v", "in", ...
Возвращает запрос AQL (без репозитория), из файла конфигурации :param params: :return:
[ "Возвращает", "запрос", "AQL", "(", "без", "репозитория", ")", "из", "файла", "конфигурации", ":", "param", "params", ":", ":", "return", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/parser.py#L36-L52
devopshq/crosspm
crosspm/helpers/parser.py
Parser.get_params_with_extra
def get_params_with_extra(self, rule_name, params): """ Get params with extra, like 'any' :param rule_name: 'path' :param params: default params :return: list of combination params """ # HACK for prefer-local result = [] extra_params = self._rules_vars_extra.get(rule_name, {})[0] _tmp_params = copy.deepcopy(params) _fixed_params = {} # Save params with list type - this type not changed for key, value in _tmp_params.items(): if isinstance(value, list): _fixed_params[key] = value _tmp_params = {k: v for k, v in _tmp_params.items() if k not in _fixed_params} # extend with extra_vars - like 'any' for key, value in _tmp_params.items(): if not isinstance(value, list) and key: _tmp_params[key] = list([value]) if key in extra_params: _tmp_params[key].extend(extra_params[key]) # get combinations keys = sorted(_tmp_params) combinations = itertools.product(*(_tmp_params[x] for x in keys)) for comb in combinations: _dict = dict(zip(keys, comb)) _dict.update(_fixed_params) result.append(_dict) return result
python
def get_params_with_extra(self, rule_name, params): """ Get params with extra, like 'any' :param rule_name: 'path' :param params: default params :return: list of combination params """ # HACK for prefer-local result = [] extra_params = self._rules_vars_extra.get(rule_name, {})[0] _tmp_params = copy.deepcopy(params) _fixed_params = {} # Save params with list type - this type not changed for key, value in _tmp_params.items(): if isinstance(value, list): _fixed_params[key] = value _tmp_params = {k: v for k, v in _tmp_params.items() if k not in _fixed_params} # extend with extra_vars - like 'any' for key, value in _tmp_params.items(): if not isinstance(value, list) and key: _tmp_params[key] = list([value]) if key in extra_params: _tmp_params[key].extend(extra_params[key]) # get combinations keys = sorted(_tmp_params) combinations = itertools.product(*(_tmp_params[x] for x in keys)) for comb in combinations: _dict = dict(zip(keys, comb)) _dict.update(_fixed_params) result.append(_dict) return result
[ "def", "get_params_with_extra", "(", "self", ",", "rule_name", ",", "params", ")", ":", "# HACK for prefer-local", "result", "=", "[", "]", "extra_params", "=", "self", ".", "_rules_vars_extra", ".", "get", "(", "rule_name", ",", "{", "}", ")", "[", "0", "...
Get params with extra, like 'any' :param rule_name: 'path' :param params: default params :return: list of combination params
[ "Get", "params", "with", "extra", "like", "any", ":", "param", "rule_name", ":", "path", ":", "param", "params", ":", "default", "params", ":", "return", ":", "list", "of", "combination", "params" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/parser.py#L726-L760
devopshq/crosspm
crosspm/helpers/parser.py
Parser.split_fixed_pattern
def split_fixed_pattern(path): """ Split path into fixed and masked parts :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc110/x86/win/boost.*.*.*.tar.gz """ _first_pattern_pos = path.find('*') _path_separator_pos = path.rfind('/', 0, _first_pattern_pos) + 1 _path_fixed = path[:_path_separator_pos] _path_pattern = path[_path_separator_pos:] return _path_fixed, _path_pattern
python
def split_fixed_pattern(path): """ Split path into fixed and masked parts :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc110/x86/win/boost.*.*.*.tar.gz """ _first_pattern_pos = path.find('*') _path_separator_pos = path.rfind('/', 0, _first_pattern_pos) + 1 _path_fixed = path[:_path_separator_pos] _path_pattern = path[_path_separator_pos:] return _path_fixed, _path_pattern
[ "def", "split_fixed_pattern", "(", "path", ")", ":", "_first_pattern_pos", "=", "path", ".", "find", "(", "'*'", ")", "_path_separator_pos", "=", "path", ".", "rfind", "(", "'/'", ",", "0", ",", "_first_pattern_pos", ")", "+", "1", "_path_fixed", "=", "pat...
Split path into fixed and masked parts :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc110/x86/win/boost.*.*.*.tar.gz
[ "Split", "path", "into", "fixed", "and", "masked", "parts", ":", "param", "path", ":", "e", ".", "g", "https", ":", "//", "repo", ".", "example", ".", "com", "/", "artifactory", "/", "libs", "-", "cpp", "-", "release", ".", "snapshot", "/", "boost", ...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/parser.py#L908-L921
devopshq/crosspm
crosspm/helpers/parser.py
Parser.split_fixed_pattern_with_file_name
def split_fixed_pattern_with_file_name(path): """ Split path into fixed, masked parts and filename :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc100/x86/win _file_name_pattern: boost.*.*.*.tar.gz """ _first_pattern_pos = path.find('*') _path_separator_pos = path.rfind('/', 0, _first_pattern_pos) _path_fixed = path[:_path_separator_pos] _path_pattern = path[_path_separator_pos + 1:] _file_name_pattern_separator_pos = _path_pattern.rfind('/', 0) _file_name_pattern = _path_pattern[_file_name_pattern_separator_pos + 1:] if _path_pattern.find('*') == -1 or _file_name_pattern_separator_pos == -1: _path_pattern = "" else: _path_pattern = _path_pattern[:_file_name_pattern_separator_pos] return _path_fixed, _path_pattern, _file_name_pattern
python
def split_fixed_pattern_with_file_name(path): """ Split path into fixed, masked parts and filename :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc100/x86/win _file_name_pattern: boost.*.*.*.tar.gz """ _first_pattern_pos = path.find('*') _path_separator_pos = path.rfind('/', 0, _first_pattern_pos) _path_fixed = path[:_path_separator_pos] _path_pattern = path[_path_separator_pos + 1:] _file_name_pattern_separator_pos = _path_pattern.rfind('/', 0) _file_name_pattern = _path_pattern[_file_name_pattern_separator_pos + 1:] if _path_pattern.find('*') == -1 or _file_name_pattern_separator_pos == -1: _path_pattern = "" else: _path_pattern = _path_pattern[:_file_name_pattern_separator_pos] return _path_fixed, _path_pattern, _file_name_pattern
[ "def", "split_fixed_pattern_with_file_name", "(", "path", ")", ":", "_first_pattern_pos", "=", "path", ".", "find", "(", "'*'", ")", "_path_separator_pos", "=", "path", ".", "rfind", "(", "'/'", ",", "0", ",", "_first_pattern_pos", ")", "_path_fixed", "=", "pa...
Split path into fixed, masked parts and filename :param path: e.g https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/*.*.*/vc110/x86/win/boost.*.*.*.tar.gz :return: _path_fixed: https://repo.example.com/artifactory/libs-cpp-release.snapshot/boost/1.60-pm/ _path_pattern: *.*.*/vc100/x86/win _file_name_pattern: boost.*.*.*.tar.gz
[ "Split", "path", "into", "fixed", "masked", "parts", "and", "filename", ":", "param", "path", ":", "e", ".", "g", "https", ":", "//", "repo", ".", "example", ".", "com", "/", "artifactory", "/", "libs", "-", "cpp", "-", "release", ".", "snapshot", "/...
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/parser.py#L924-L946
workforce-data-initiative/skills-utils
skills_utils/es.py
basic_client
def basic_client(): """Returns an Elasticsearch basic client that is responsive to the environment variable ELASTICSEARCH_ENDPOINT""" es_connected = False while not es_connected: try: ES = Elasticsearch( hosts=[HOSTNAME] ) es_connected = True except TransportError as e: logging.info('Not yet connected: %s, sleeping for 1s', e) time.sleep(1) return ES
python
def basic_client(): """Returns an Elasticsearch basic client that is responsive to the environment variable ELASTICSEARCH_ENDPOINT""" es_connected = False while not es_connected: try: ES = Elasticsearch( hosts=[HOSTNAME] ) es_connected = True except TransportError as e: logging.info('Not yet connected: %s, sleeping for 1s', e) time.sleep(1) return ES
[ "def", "basic_client", "(", ")", ":", "es_connected", "=", "False", "while", "not", "es_connected", ":", "try", ":", "ES", "=", "Elasticsearch", "(", "hosts", "=", "[", "HOSTNAME", "]", ")", "es_connected", "=", "True", "except", "TransportError", "as", "e...
Returns an Elasticsearch basic client that is responsive to the environment variable ELASTICSEARCH_ENDPOINT
[ "Returns", "an", "Elasticsearch", "basic", "client", "that", "is", "responsive", "to", "the", "environment", "variable", "ELASTICSEARCH_ENDPOINT" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L16-L29
workforce-data-initiative/skills-utils
skills_utils/es.py
create_index
def create_index(index_name, index_config, client): """Creates an index with a given configuration Args: index_name (str): Name of the index you want to create index_config (dict) configuration for the index client (Elasticsearch.IndicesClient) the Elasticsearch client """ client.create(index=index_name, body=index_config)
python
def create_index(index_name, index_config, client): """Creates an index with a given configuration Args: index_name (str): Name of the index you want to create index_config (dict) configuration for the index client (Elasticsearch.IndicesClient) the Elasticsearch client """ client.create(index=index_name, body=index_config)
[ "def", "create_index", "(", "index_name", ",", "index_config", ",", "client", ")", ":", "client", ".", "create", "(", "index", "=", "index_name", ",", "body", "=", "index_config", ")" ]
Creates an index with a given configuration Args: index_name (str): Name of the index you want to create index_config (dict) configuration for the index client (Elasticsearch.IndicesClient) the Elasticsearch client
[ "Creates", "an", "index", "with", "a", "given", "configuration" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L48-L56
workforce-data-initiative/skills-utils
skills_utils/es.py
get_index_from_alias
def get_index_from_alias(alias_name, index_client=None): """Retrieve the base index name from an alias Args: alias_name (str) Name of the alias index_client (Elasticsearch.IndicesClient) an Elasticsearch index client. Optional, will create one if not given Returns: (str) Name of index """ index_client = index_client or indices_client() if not index_client.exists_alias(name=alias_name): return None return list(index_client.get_alias(name=alias_name).keys())[0]
python
def get_index_from_alias(alias_name, index_client=None): """Retrieve the base index name from an alias Args: alias_name (str) Name of the alias index_client (Elasticsearch.IndicesClient) an Elasticsearch index client. Optional, will create one if not given Returns: (str) Name of index """ index_client = index_client or indices_client() if not index_client.exists_alias(name=alias_name): return None return list(index_client.get_alias(name=alias_name).keys())[0]
[ "def", "get_index_from_alias", "(", "alias_name", ",", "index_client", "=", "None", ")", ":", "index_client", "=", "index_client", "or", "indices_client", "(", ")", "if", "not", "index_client", ".", "exists_alias", "(", "name", "=", "alias_name", ")", ":", "re...
Retrieve the base index name from an alias Args: alias_name (str) Name of the alias index_client (Elasticsearch.IndicesClient) an Elasticsearch index client. Optional, will create one if not given Returns: (str) Name of index
[ "Retrieve", "the", "base", "index", "name", "from", "an", "alias" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L59-L72
workforce-data-initiative/skills-utils
skills_utils/es.py
atomic_swap
def atomic_swap(alias_name, new_index_name, index_client): """Points an alias to a new index, then delete the old index if needed Uses client.update_aliases to perform this with zero downtime Args: alias_name (str) Name of the alias new_index_name (str) The new index that the alias should point to index_client (Elasticsearch.IndicesClient) Elasticsearch index client """ logging.info('Performing atomic index alias swap') if index_client.exists_alias(name=alias_name): old_index_name = get_index_from_alias(alias_name, index_client) logging.info('Removing old as well as adding new') actions = {'actions': [ {'remove': {'index': old_index_name, 'alias': alias_name}}, {'add': {'index': new_index_name, 'alias': alias_name}} ]} index_client.update_aliases(body=actions) index_client.delete(index=old_index_name) else: logging.info('Old alias not found, only adding new') actions = {'actions': [ {'add': {'index': new_index_name, 'alias': alias_name}} ]} index_client.update_aliases(body=actions)
python
def atomic_swap(alias_name, new_index_name, index_client): """Points an alias to a new index, then delete the old index if needed Uses client.update_aliases to perform this with zero downtime Args: alias_name (str) Name of the alias new_index_name (str) The new index that the alias should point to index_client (Elasticsearch.IndicesClient) Elasticsearch index client """ logging.info('Performing atomic index alias swap') if index_client.exists_alias(name=alias_name): old_index_name = get_index_from_alias(alias_name, index_client) logging.info('Removing old as well as adding new') actions = {'actions': [ {'remove': {'index': old_index_name, 'alias': alias_name}}, {'add': {'index': new_index_name, 'alias': alias_name}} ]} index_client.update_aliases(body=actions) index_client.delete(index=old_index_name) else: logging.info('Old alias not found, only adding new') actions = {'actions': [ {'add': {'index': new_index_name, 'alias': alias_name}} ]} index_client.update_aliases(body=actions)
[ "def", "atomic_swap", "(", "alias_name", ",", "new_index_name", ",", "index_client", ")", ":", "logging", ".", "info", "(", "'Performing atomic index alias swap'", ")", "if", "index_client", ".", "exists_alias", "(", "name", "=", "alias_name", ")", ":", "old_index...
Points an alias to a new index, then delete the old index if needed Uses client.update_aliases to perform this with zero downtime Args: alias_name (str) Name of the alias new_index_name (str) The new index that the alias should point to index_client (Elasticsearch.IndicesClient) Elasticsearch index client
[ "Points", "an", "alias", "to", "a", "new", "index", "then", "delete", "the", "old", "index", "if", "needed" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L75-L100
workforce-data-initiative/skills-utils
skills_utils/es.py
zero_downtime_index
def zero_downtime_index(index_name, index_config): """Context manager to create a new index based on a given alias, allow the caller to index it, and then point the alias to the new index Args: index_name (str) Name of an alias that should point to the new index index_config (dict) Configuration for the new index Yields: (name) The full name of the new index """ client = indices_client() temporary_name = index_name + '_' + str(uuid.uuid4()) logging.info('creating index with config %s', index_config) create_index(temporary_name, index_config, client) try: yield temporary_name atomic_swap(index_name, temporary_name, client) except Exception: logging.error( 'deleting temporary index %s due to error:', temporary_name, exc_info=True ) client.delete(index=temporary_name)
python
def zero_downtime_index(index_name, index_config): """Context manager to create a new index based on a given alias, allow the caller to index it, and then point the alias to the new index Args: index_name (str) Name of an alias that should point to the new index index_config (dict) Configuration for the new index Yields: (name) The full name of the new index """ client = indices_client() temporary_name = index_name + '_' + str(uuid.uuid4()) logging.info('creating index with config %s', index_config) create_index(temporary_name, index_config, client) try: yield temporary_name atomic_swap(index_name, temporary_name, client) except Exception: logging.error( 'deleting temporary index %s due to error:', temporary_name, exc_info=True ) client.delete(index=temporary_name)
[ "def", "zero_downtime_index", "(", "index_name", ",", "index_config", ")", ":", "client", "=", "indices_client", "(", ")", "temporary_name", "=", "index_name", "+", "'_'", "+", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "logging", ".", "info", "(", ...
Context manager to create a new index based on a given alias, allow the caller to index it, and then point the alias to the new index Args: index_name (str) Name of an alias that should point to the new index index_config (dict) Configuration for the new index Yields: (name) The full name of the new index
[ "Context", "manager", "to", "create", "a", "new", "index", "based", "on", "a", "given", "alias", "allow", "the", "caller", "to", "index", "it", "and", "then", "point", "the", "alias", "to", "the", "new", "index" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L104-L127
workforce-data-initiative/skills-utils
skills_utils/es.py
ElasticsearchIndexerBase.replace
def replace(self): """Replace index with a new one zero_downtime_index for safety and rollback """ with zero_downtime_index(self.alias_name, self.index_config()) as target_index: self.index_all(target_index)
python
def replace(self): """Replace index with a new one zero_downtime_index for safety and rollback """ with zero_downtime_index(self.alias_name, self.index_config()) as target_index: self.index_all(target_index)
[ "def", "replace", "(", "self", ")", ":", "with", "zero_downtime_index", "(", "self", ".", "alias_name", ",", "self", ".", "index_config", "(", ")", ")", "as", "target_index", ":", "self", ".", "index_all", "(", "target_index", ")" ]
Replace index with a new one zero_downtime_index for safety and rollback
[ "Replace", "index", "with", "a", "new", "one", "zero_downtime_index", "for", "safety", "and", "rollback" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L154-L159
workforce-data-initiative/skills-utils
skills_utils/es.py
ElasticsearchIndexerBase.append
def append(self): """Index documents onto an existing index""" target_index = get_index_from_alias(self.alias_name) if not target_index: self.replace() else: self.index_all(target_index)
python
def append(self): """Index documents onto an existing index""" target_index = get_index_from_alias(self.alias_name) if not target_index: self.replace() else: self.index_all(target_index)
[ "def", "append", "(", "self", ")", ":", "target_index", "=", "get_index_from_alias", "(", "self", ".", "alias_name", ")", "if", "not", "target_index", ":", "self", ".", "replace", "(", ")", "else", ":", "self", ".", "index_all", "(", "target_index", ")" ]
Index documents onto an existing index
[ "Index", "documents", "onto", "an", "existing", "index" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L161-L167
workforce-data-initiative/skills-utils
skills_utils/es.py
ElasticsearchIndexerBase.index_all
def index_all(self, index_name): """Index all available documents, using streaming_bulk for speed Args: index_name (string): The index """ oks = 0 notoks = 0 for ok, item in streaming_bulk( self.es_client, self._iter_documents(index_name) ): if ok: oks += 1 else: notoks += 1 logging.info( "Import results: %d ok, %d not ok", oks, notoks )
python
def index_all(self, index_name): """Index all available documents, using streaming_bulk for speed Args: index_name (string): The index """ oks = 0 notoks = 0 for ok, item in streaming_bulk( self.es_client, self._iter_documents(index_name) ): if ok: oks += 1 else: notoks += 1 logging.info( "Import results: %d ok, %d not ok", oks, notoks )
[ "def", "index_all", "(", "self", ",", "index_name", ")", ":", "oks", "=", "0", "notoks", "=", "0", "for", "ok", ",", "item", "in", "streaming_bulk", "(", "self", ".", "es_client", ",", "self", ".", "_iter_documents", "(", "index_name", ")", ")", ":", ...
Index all available documents, using streaming_bulk for speed Args: index_name (string): The index
[ "Index", "all", "available", "documents", "using", "streaming_bulk", "for", "speed", "Args", ":" ]
train
https://github.com/workforce-data-initiative/skills-utils/blob/4cf9b7c2938984f34bbcc33d45482d23c52c7539/skills_utils/es.py#L169-L189
ColinDuquesnoy/pyqt_distutils
pyqt_distutils/utils.py
build_args
def build_args(cmd, src, dst): """ Build arguments list for passing to subprocess.call_check :param cmd str: Command string to interpolate src and dst filepaths into. Typically the output of `config.Config.uic_command` or `config.Config.rcc_command`. :param src str: Source filepath. :param dst str: Destination filepath. """ cmd = cmd % (quote(src), quote(dst)) args = shlex.split(cmd) return [arg for arg in args if arg]
python
def build_args(cmd, src, dst): """ Build arguments list for passing to subprocess.call_check :param cmd str: Command string to interpolate src and dst filepaths into. Typically the output of `config.Config.uic_command` or `config.Config.rcc_command`. :param src str: Source filepath. :param dst str: Destination filepath. """ cmd = cmd % (quote(src), quote(dst)) args = shlex.split(cmd) return [arg for arg in args if arg]
[ "def", "build_args", "(", "cmd", ",", "src", ",", "dst", ")", ":", "cmd", "=", "cmd", "%", "(", "quote", "(", "src", ")", ",", "quote", "(", "dst", ")", ")", "args", "=", "shlex", ".", "split", "(", "cmd", ")", "return", "[", "arg", "for", "a...
Build arguments list for passing to subprocess.call_check :param cmd str: Command string to interpolate src and dst filepaths into. Typically the output of `config.Config.uic_command` or `config.Config.rcc_command`. :param src str: Source filepath. :param dst str: Destination filepath.
[ "Build", "arguments", "list", "for", "passing", "to", "subprocess", ".", "call_check" ]
train
https://github.com/ColinDuquesnoy/pyqt_distutils/blob/7387d64ea2db3b1dafb09d006266cec580131f7d/pyqt_distutils/utils.py#L17-L29
devopshq/crosspm
crosspm/helpers/output.py
register_output_format
def register_output_format(name): """ Load output format function to dictionary (decorator with this function name) """ def check_decorator(fn): _output_format_map[name] = fn def wrapper(*args, **kwargs): return fn(*args, **kwargs) return wrapper return check_decorator
python
def register_output_format(name): """ Load output format function to dictionary (decorator with this function name) """ def check_decorator(fn): _output_format_map[name] = fn def wrapper(*args, **kwargs): return fn(*args, **kwargs) return wrapper return check_decorator
[ "def", "register_output_format", "(", "name", ")", ":", "def", "check_decorator", "(", "fn", ")", ":", "_output_format_map", "[", "name", "]", "=", "fn", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "fn", "(", "*", ...
Load output format function to dictionary (decorator with this function name)
[ "Load", "output", "format", "function", "to", "dictionary", "(", "decorator", "with", "this", "function", "name", ")" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/output.py#L55-L68
devopshq/crosspm
crosspm/helpers/output.py
Output.write_output
def write_output(self, params, packages): """ Функция вызывает определенную функцию для фиксированного out-format :param params: :param packages: :return: """ if params['out_format'] not in _output_format_map: raise CrosspmException( CROSSPM_ERRORCODE_UNKNOWN_OUT_TYPE, 'Unknown out_format: [{}]'.format(params['out_format']), ) f = _output_format_map[params['out_format']] result = f(self, packages, **params) if result: out_file_path = os.path.realpath(os.path.expanduser(params['output'])) self.write_to_file(result, out_file_path) self._log.info( 'Write packages info to file [%s]\ncontent:\n\n%s', out_file_path, result, )
python
def write_output(self, params, packages): """ Функция вызывает определенную функцию для фиксированного out-format :param params: :param packages: :return: """ if params['out_format'] not in _output_format_map: raise CrosspmException( CROSSPM_ERRORCODE_UNKNOWN_OUT_TYPE, 'Unknown out_format: [{}]'.format(params['out_format']), ) f = _output_format_map[params['out_format']] result = f(self, packages, **params) if result: out_file_path = os.path.realpath(os.path.expanduser(params['output'])) self.write_to_file(result, out_file_path) self._log.info( 'Write packages info to file [%s]\ncontent:\n\n%s', out_file_path, result, )
[ "def", "write_output", "(", "self", ",", "params", ",", "packages", ")", ":", "if", "params", "[", "'out_format'", "]", "not", "in", "_output_format_map", ":", "raise", "CrosspmException", "(", "CROSSPM_ERRORCODE_UNKNOWN_OUT_TYPE", ",", "'Unknown out_format: [{}]'", ...
Функция вызывает определенную функцию для фиксированного out-format :param params: :param packages: :return:
[ "Функция", "вызывает", "определенную", "функцию", "для", "фиксированного", "out", "-", "format", ":", "param", "params", ":", ":", "param", "packages", ":", ":", "return", ":" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/output.py#L202-L225
devopshq/crosspm
crosspm/helpers/output.py
Output.output_format_lock
def output_format_lock(self, packages, **kwargs): """ Text to lock file """ self._output_config['type'] = PLAIN text = '' tmp_packages = OrderedDict() columns = self._config.get_columns() widths = {} for _pkg in packages.values(): _pkg_name = _pkg.package_name _params = _pkg.get_params(columns, merged=True, raw=False) if _pkg_name not in tmp_packages: tmp_packages[_pkg_name] = _params comment = 1 for _col in columns: widths[_col] = max(widths.get(_col, len(_col)), len(str(_params.get(_col, '')))) + comment comment = 0 comment = 1 for _col in columns: text += '{}{} '.format(_col, ' ' * (widths[_col] - len(_col) - comment)) comment = 0 text = '#{}\n'.format(text.strip()) for _pkg_name in sorted(tmp_packages, key=lambda x: str(x).lower()): _pkg = tmp_packages[_pkg_name] line = '' for _col in columns: line += '{}{} '.format(_pkg[_col], ' ' * (widths[_col] - len(str(_pkg[_col])))) text += '{}\n'.format(line.strip()) return text
python
def output_format_lock(self, packages, **kwargs): """ Text to lock file """ self._output_config['type'] = PLAIN text = '' tmp_packages = OrderedDict() columns = self._config.get_columns() widths = {} for _pkg in packages.values(): _pkg_name = _pkg.package_name _params = _pkg.get_params(columns, merged=True, raw=False) if _pkg_name not in tmp_packages: tmp_packages[_pkg_name] = _params comment = 1 for _col in columns: widths[_col] = max(widths.get(_col, len(_col)), len(str(_params.get(_col, '')))) + comment comment = 0 comment = 1 for _col in columns: text += '{}{} '.format(_col, ' ' * (widths[_col] - len(_col) - comment)) comment = 0 text = '#{}\n'.format(text.strip()) for _pkg_name in sorted(tmp_packages, key=lambda x: str(x).lower()): _pkg = tmp_packages[_pkg_name] line = '' for _col in columns: line += '{}{} '.format(_pkg[_col], ' ' * (widths[_col] - len(str(_pkg[_col])))) text += '{}\n'.format(line.strip()) return text
[ "def", "output_format_lock", "(", "self", ",", "packages", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_output_config", "[", "'type'", "]", "=", "PLAIN", "text", "=", "''", "tmp_packages", "=", "OrderedDict", "(", ")", "columns", "=", "self", ".", ...
Text to lock file
[ "Text", "to", "lock", "file" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/output.py#L276-L303
devopshq/crosspm
crosspm/helpers/output.py
Output.output_format_module
def output_format_module(self, packages, esc_path=False): """ Create out with child first position """ def create_ordered_list(packages_): """ Recursive for package.packages """ list_ = [] for _pkg_name in packages_: _pkg = packages_[_pkg_name] if _pkg and _pkg.packages: list_.extend(create_ordered_list(_pkg.packages)) if _pkg: _pkg_params = _pkg.get_params(self._columns, True) _res_item = {} for item in self._output_config['columns']: name = item['name'].format(OutFormat(item['column'])) value = _pkg_params.get(item['column'], '') if not isinstance(value, (list, dict, tuple)): try: value = item['value'].format( OutFormat(value, (item['column'] == 'path') if esc_path else False)) except Exception: value = '' # TODO: implement this: # if not value: # try: # value = item['value'].format(OutFormat(_pkg.get_params('', True))) # except Exception as e: # pass _res_item[name] = value list_.append(_res_item) return list_ result_list = create_ordered_list(packages, ) if self._output_config['type'] == LIST: return result_list result = OrderedDict() for item in result_list: # TODO: Error handling name = item[self._output_config['key']] if self._output_config['value']: value = item[self._output_config['value']] else: value = OrderedDict([(k, v) for k, v in item.items() if k != self._output_config['key']]) result[name] = value return result
python
def output_format_module(self, packages, esc_path=False): """ Create out with child first position """ def create_ordered_list(packages_): """ Recursive for package.packages """ list_ = [] for _pkg_name in packages_: _pkg = packages_[_pkg_name] if _pkg and _pkg.packages: list_.extend(create_ordered_list(_pkg.packages)) if _pkg: _pkg_params = _pkg.get_params(self._columns, True) _res_item = {} for item in self._output_config['columns']: name = item['name'].format(OutFormat(item['column'])) value = _pkg_params.get(item['column'], '') if not isinstance(value, (list, dict, tuple)): try: value = item['value'].format( OutFormat(value, (item['column'] == 'path') if esc_path else False)) except Exception: value = '' # TODO: implement this: # if not value: # try: # value = item['value'].format(OutFormat(_pkg.get_params('', True))) # except Exception as e: # pass _res_item[name] = value list_.append(_res_item) return list_ result_list = create_ordered_list(packages, ) if self._output_config['type'] == LIST: return result_list result = OrderedDict() for item in result_list: # TODO: Error handling name = item[self._output_config['key']] if self._output_config['value']: value = item[self._output_config['value']] else: value = OrderedDict([(k, v) for k, v in item.items() if k != self._output_config['key']]) result[name] = value return result
[ "def", "output_format_module", "(", "self", ",", "packages", ",", "esc_path", "=", "False", ")", ":", "def", "create_ordered_list", "(", "packages_", ")", ":", "\"\"\"\n Recursive for package.packages\n \"\"\"", "list_", "=", "[", "]", "for", "_p...
Create out with child first position
[ "Create", "out", "with", "child", "first", "position" ]
train
https://github.com/devopshq/crosspm/blob/c831442ecfaa1d43c66cb148857096cea292c950/crosspm/helpers/output.py#L305-L357
ebertti/django-admin-easy
easy/admin/decorators.py
smart
def smart(**kwargs): """ Simple decorator to get custom fields on admin class, using this you will use less line codes :param short_description: description of custom field :type str: :param admin_order_field: field to order on click :type str: :param allow_tags: allow html tags :type bool: :param boolean: if field is True, False or None :type bool: :param empty_value_display: Default value when field is null :type str: :return: method decorated :rtype: method """ def decorator(func): for key, value in kwargs.items(): setattr(func, key, value) return func return decorator
python
def smart(**kwargs): """ Simple decorator to get custom fields on admin class, using this you will use less line codes :param short_description: description of custom field :type str: :param admin_order_field: field to order on click :type str: :param allow_tags: allow html tags :type bool: :param boolean: if field is True, False or None :type bool: :param empty_value_display: Default value when field is null :type str: :return: method decorated :rtype: method """ def decorator(func): for key, value in kwargs.items(): setattr(func, key, value) return func return decorator
[ "def", "smart", "(", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "setattr", "(", "func", ",", "key", ",", "value", ")", "return", "func", "retur...
Simple decorator to get custom fields on admin class, using this you will use less line codes :param short_description: description of custom field :type str: :param admin_order_field: field to order on click :type str: :param allow_tags: allow html tags :type bool: :param boolean: if field is True, False or None :type bool: :param empty_value_display: Default value when field is null :type str: :return: method decorated :rtype: method
[ "Simple", "decorator", "to", "get", "custom", "fields", "on", "admin", "class", "using", "this", "you", "will", "use", "less", "line", "codes" ]
train
https://github.com/ebertti/django-admin-easy/blob/fff5229d2b5ccee2010df9b37ea423fe96d913f7/easy/admin/decorators.py#L9-L37
martinmcbride/pysound
pysound/buffer.py
create_buffer
def create_buffer(params, value): ''' If the value is a float, create a numpy array of the required length, filled with value If the value is a numpy array, check its length Otherwise throw a type error ''' try: fv = float(value) return np.full(params.length, fv, np.float) except TypeError: if isinstance(value, np.ndarray): if (len(value)>=params.length): return value raise TypeError('Value must be a float or a numpy array ofthe required length')
python
def create_buffer(params, value): ''' If the value is a float, create a numpy array of the required length, filled with value If the value is a numpy array, check its length Otherwise throw a type error ''' try: fv = float(value) return np.full(params.length, fv, np.float) except TypeError: if isinstance(value, np.ndarray): if (len(value)>=params.length): return value raise TypeError('Value must be a float or a numpy array ofthe required length')
[ "def", "create_buffer", "(", "params", ",", "value", ")", ":", "try", ":", "fv", "=", "float", "(", "value", ")", "return", "np", ".", "full", "(", "params", ".", "length", ",", "fv", ",", "np", ".", "float", ")", "except", "TypeError", ":", "if", ...
If the value is a float, create a numpy array of the required length, filled with value If the value is a numpy array, check its length Otherwise throw a type error
[ "If", "the", "value", "is", "a", "float", "create", "a", "numpy", "array", "of", "the", "required", "length", "filled", "with", "value", "If", "the", "value", "is", "a", "numpy", "array", "check", "its", "length", "Otherwise", "throw", "a", "type", "erro...
train
https://github.com/martinmcbride/pysound/blob/253c8f712ad475318350e5a8ba21f6fefd7a3de2/pysound/buffer.py#L76-L89
martinmcbride/pysound
pysound/buffer.py
BufferParams.set_time
def set_time(self, time): ''' Update the length of the buffer in seconds :param time: number of seconds :return: self ''' self.length = int(time*self.sample_rate) return self
python
def set_time(self, time): ''' Update the length of the buffer in seconds :param time: number of seconds :return: self ''' self.length = int(time*self.sample_rate) return self
[ "def", "set_time", "(", "self", ",", "time", ")", ":", "self", ".", "length", "=", "int", "(", "time", "*", "self", ".", "sample_rate", ")", "return", "self" ]
Update the length of the buffer in seconds :param time: number of seconds :return: self
[ "Update", "the", "length", "of", "the", "buffer", "in", "seconds", ":", "param", "time", ":", "number", "of", "seconds", ":", "return", ":", "self" ]
train
https://github.com/martinmcbride/pysound/blob/253c8f712ad475318350e5a8ba21f6fefd7a3de2/pysound/buffer.py#L37-L44
wasp/waspy
waspy/client.py
Client.make_request
def make_request(self, method, service, path, body=None, query_params: QueryParams=None, headers: dict=None, correlation_id: str=None, content_type: str='application/json', context: Request=None, timeout=30, **kwargs) -> asyncio.coroutine: """ Make a request to another service. If `context` is provided, then context and correlation will be pulled from the provided request object for you. This includes credentials, correlationid, and service-headers. :param method: GET/PUT/PATCH, etc. :param service: name of service :param path: request object path :param body: body of request :param query_params: :param headers: :param correlation_id: :param content_type: :param context: A request object from which a "child-request" will be made :param timeout: Time in seconds the client will wait befor raising an asyncio.TimeoutError :param kwargs: Just a place holder so transport specific options can be passed through :return: """ if not isinstance(method, Methods): method = Methods(method.upper()) if content_type == 'application/json' and isinstance(body, dict): body = json.dumps(body) if isinstance(query_params, dict): query_string = parse.urlencode(query_params) elif isinstance(query_params, QueryParams): query_string = str(query_params) else: query_string = '' headers = headers or {} ctx = request_context.get() if context: warnings.warn("Passing in a context to waspy client is deprecated. " "Passed in context will be ignored", DeprecationWarning) if not correlation_id: correlation_id = ctx['correlation_id'] headers = {**headers, **ctx['ctx_headers']} exchange = headers.get('ctx-exchange-override', None) if exchange: kwargs['exchange'] = 'amq.headers' if isinstance(body, str): body = body.encode() response = asyncio.wait_for( self.transport.make_request( service, method.name, path, body=body, query=query_string, headers=headers, correlation_id=correlation_id, content_type=content_type, timeout=timeout, **kwargs), timeout=timeout) return response
python
def make_request(self, method, service, path, body=None, query_params: QueryParams=None, headers: dict=None, correlation_id: str=None, content_type: str='application/json', context: Request=None, timeout=30, **kwargs) -> asyncio.coroutine: """ Make a request to another service. If `context` is provided, then context and correlation will be pulled from the provided request object for you. This includes credentials, correlationid, and service-headers. :param method: GET/PUT/PATCH, etc. :param service: name of service :param path: request object path :param body: body of request :param query_params: :param headers: :param correlation_id: :param content_type: :param context: A request object from which a "child-request" will be made :param timeout: Time in seconds the client will wait befor raising an asyncio.TimeoutError :param kwargs: Just a place holder so transport specific options can be passed through :return: """ if not isinstance(method, Methods): method = Methods(method.upper()) if content_type == 'application/json' and isinstance(body, dict): body = json.dumps(body) if isinstance(query_params, dict): query_string = parse.urlencode(query_params) elif isinstance(query_params, QueryParams): query_string = str(query_params) else: query_string = '' headers = headers or {} ctx = request_context.get() if context: warnings.warn("Passing in a context to waspy client is deprecated. " "Passed in context will be ignored", DeprecationWarning) if not correlation_id: correlation_id = ctx['correlation_id'] headers = {**headers, **ctx['ctx_headers']} exchange = headers.get('ctx-exchange-override', None) if exchange: kwargs['exchange'] = 'amq.headers' if isinstance(body, str): body = body.encode() response = asyncio.wait_for( self.transport.make_request( service, method.name, path, body=body, query=query_string, headers=headers, correlation_id=correlation_id, content_type=content_type, timeout=timeout, **kwargs), timeout=timeout) return response
[ "def", "make_request", "(", "self", ",", "method", ",", "service", ",", "path", ",", "body", "=", "None", ",", "query_params", ":", "QueryParams", "=", "None", ",", "headers", ":", "dict", "=", "None", ",", "correlation_id", ":", "str", "=", "None", ",...
Make a request to another service. If `context` is provided, then context and correlation will be pulled from the provided request object for you. This includes credentials, correlationid, and service-headers. :param method: GET/PUT/PATCH, etc. :param service: name of service :param path: request object path :param body: body of request :param query_params: :param headers: :param correlation_id: :param content_type: :param context: A request object from which a "child-request" will be made :param timeout: Time in seconds the client will wait befor raising an asyncio.TimeoutError :param kwargs: Just a place holder so transport specific options can be passed through :return:
[ "Make", "a", "request", "to", "another", "service", ".", "If", "context", "is", "provided", "then", "context", "and", "correlation", "will", "be", "pulled", "from", "the", "provided", "request", "object", "for", "you", ".", "This", "includes", "credentials", ...
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/client.py#L21-L85
wasp/waspy
waspy/client.py
Client.get
def get(self, service, path, **kwargs): """ Make a get request (this returns a coroutine)""" return self.make_request(Methods.GET, service, path, **kwargs)
python
def get(self, service, path, **kwargs): """ Make a get request (this returns a coroutine)""" return self.make_request(Methods.GET, service, path, **kwargs)
[ "def", "get", "(", "self", ",", "service", ",", "path", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "make_request", "(", "Methods", ".", "GET", ",", "service", ",", "path", ",", "*", "*", "kwargs", ")" ]
Make a get request (this returns a coroutine)
[ "Make", "a", "get", "request", "(", "this", "returns", "a", "coroutine", ")" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/client.py#L87-L89
wasp/waspy
waspy/client.py
Client.put
def put(self, service, path, body, **kwargs): """ Make a put request (this returns a coroutine)""" return self.make_request(Methods.POST, service, path, body=body, **kwargs)
python
def put(self, service, path, body, **kwargs): """ Make a put request (this returns a coroutine)""" return self.make_request(Methods.POST, service, path, body=body, **kwargs)
[ "def", "put", "(", "self", ",", "service", ",", "path", ",", "body", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "make_request", "(", "Methods", ".", "POST", ",", "service", ",", "path", ",", "body", "=", "body", ",", "*", "*", "kwa...
Make a put request (this returns a coroutine)
[ "Make", "a", "put", "request", "(", "this", "returns", "a", "coroutine", ")" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/client.py#L96-L99
wasp/waspy
waspy/client.py
Client.patch
def patch(self, service, path, body, **kwargs): """ Make a patche requests (this returns a coroutine)""" return self.make_request(Methods.PATCH, service, path, body=body, **kwargs)
python
def patch(self, service, path, body, **kwargs): """ Make a patche requests (this returns a coroutine)""" return self.make_request(Methods.PATCH, service, path, body=body, **kwargs)
[ "def", "patch", "(", "self", ",", "service", ",", "path", ",", "body", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "make_request", "(", "Methods", ".", "PATCH", ",", "service", ",", "path", ",", "body", "=", "body", ",", "*", "*", "...
Make a patche requests (this returns a coroutine)
[ "Make", "a", "patche", "requests", "(", "this", "returns", "a", "coroutine", ")" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/client.py#L101-L104
wasp/waspy
waspy/client.py
Client.delete
def delete(self, service, path, **kwargs): """ Make a delete requests (this returns a coroutine)""" return self.make_request(Methods.DELETE, service, path, **kwargs)
python
def delete(self, service, path, **kwargs): """ Make a delete requests (this returns a coroutine)""" return self.make_request(Methods.DELETE, service, path, **kwargs)
[ "def", "delete", "(", "self", ",", "service", ",", "path", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "make_request", "(", "Methods", ".", "DELETE", ",", "service", ",", "path", ",", "*", "*", "kwargs", ")" ]
Make a delete requests (this returns a coroutine)
[ "Make", "a", "delete", "requests", "(", "this", "returns", "a", "coroutine", ")" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/client.py#L106-L108
proteanhq/protean
src/protean/core/usecase/generic.py
ShowUseCase.process_request
def process_request(self, request_object): """Fetch Resource and return Entity""" identifier = request_object.identifier # Look for the object by its ID and return it resource = request_object.entity_cls.get(identifier) return ResponseSuccess(Status.SUCCESS, resource)
python
def process_request(self, request_object): """Fetch Resource and return Entity""" identifier = request_object.identifier # Look for the object by its ID and return it resource = request_object.entity_cls.get(identifier) return ResponseSuccess(Status.SUCCESS, resource)
[ "def", "process_request", "(", "self", ",", "request_object", ")", ":", "identifier", "=", "request_object", ".", "identifier", "# Look for the object by its ID and return it", "resource", "=", "request_object", ".", "entity_cls", ".", "get", "(", "identifier", ")", "...
Fetch Resource and return Entity
[ "Fetch", "Resource", "and", "return", "Entity" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L26-L33
proteanhq/protean
src/protean/core/usecase/generic.py
ListRequestObject.from_dict
def from_dict(cls, entity_cls, adict): """Initialize a ListRequestObject object from a dictionary.""" invalid_req = InvalidRequestObject() # Extract the pagination parameters from the input page = int(adict.pop('page', 1)) per_page = int(adict.pop( 'per_page', getattr(active_config, 'PER_PAGE', 10))) order_by = adict.pop('order_by', ()) # Check for invalid request conditions if page < 0: invalid_req.add_error('page', 'is invalid') if invalid_req.has_errors: return invalid_req # Do we need to pop out random? # adict.pop('random', None) return cls(entity_cls, page, per_page, order_by, adict)
python
def from_dict(cls, entity_cls, adict): """Initialize a ListRequestObject object from a dictionary.""" invalid_req = InvalidRequestObject() # Extract the pagination parameters from the input page = int(adict.pop('page', 1)) per_page = int(adict.pop( 'per_page', getattr(active_config, 'PER_PAGE', 10))) order_by = adict.pop('order_by', ()) # Check for invalid request conditions if page < 0: invalid_req.add_error('page', 'is invalid') if invalid_req.has_errors: return invalid_req # Do we need to pop out random? # adict.pop('random', None) return cls(entity_cls, page, per_page, order_by, adict)
[ "def", "from_dict", "(", "cls", ",", "entity_cls", ",", "adict", ")", ":", "invalid_req", "=", "InvalidRequestObject", "(", ")", "# Extract the pagination parameters from the input", "page", "=", "int", "(", "adict", ".", "pop", "(", "'page'", ",", "1", ")", "...
Initialize a ListRequestObject object from a dictionary.
[ "Initialize", "a", "ListRequestObject", "object", "from", "a", "dictionary", "." ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L69-L89
proteanhq/protean
src/protean/core/usecase/generic.py
ListUseCase.process_request
def process_request(self, request_object): """Return a list of resources""" resources = (request_object.entity_cls.query .filter(**request_object.filters) .offset((request_object.page - 1) * request_object.per_page) .limit(request_object.per_page) .order_by(request_object.order_by) .all()) return ResponseSuccess(Status.SUCCESS, resources)
python
def process_request(self, request_object): """Return a list of resources""" resources = (request_object.entity_cls.query .filter(**request_object.filters) .offset((request_object.page - 1) * request_object.per_page) .limit(request_object.per_page) .order_by(request_object.order_by) .all()) return ResponseSuccess(Status.SUCCESS, resources)
[ "def", "process_request", "(", "self", ",", "request_object", ")", ":", "resources", "=", "(", "request_object", ".", "entity_cls", ".", "query", ".", "filter", "(", "*", "*", "request_object", ".", "filters", ")", ".", "offset", "(", "(", "request_object", ...
Return a list of resources
[ "Return", "a", "list", "of", "resources" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L97-L105
proteanhq/protean
src/protean/core/usecase/generic.py
CreateUseCase.process_request
def process_request(self, request_object): """Process Create Resource Request""" resource = request_object.entity_cls.create(**request_object.data) return ResponseSuccessCreated(resource)
python
def process_request(self, request_object): """Process Create Resource Request""" resource = request_object.entity_cls.create(**request_object.data) return ResponseSuccessCreated(resource)
[ "def", "process_request", "(", "self", ",", "request_object", ")", ":", "resource", "=", "request_object", ".", "entity_cls", ".", "create", "(", "*", "*", "request_object", ".", "data", ")", "return", "ResponseSuccessCreated", "(", "resource", ")" ]
Process Create Resource Request
[ "Process", "Create", "Resource", "Request" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L119-L123
proteanhq/protean
src/protean/core/usecase/generic.py
UpdateUseCase.process_request
def process_request(self, request_object): """Process Update Resource Request""" # Retrieve the object by its identifier entity = request_object.entity_cls.get(request_object.identifier) # Update the object and return the updated data resource = entity.update(request_object.data) return ResponseSuccess(Status.SUCCESS, resource)
python
def process_request(self, request_object): """Process Update Resource Request""" # Retrieve the object by its identifier entity = request_object.entity_cls.get(request_object.identifier) # Update the object and return the updated data resource = entity.update(request_object.data) return ResponseSuccess(Status.SUCCESS, resource)
[ "def", "process_request", "(", "self", ",", "request_object", ")", ":", "# Retrieve the object by its identifier", "entity", "=", "request_object", ".", "entity_cls", ".", "get", "(", "request_object", ".", "identifier", ")", "# Update the object and return the updated data...
Process Update Resource Request
[ "Process", "Update", "Resource", "Request" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L138-L146
proteanhq/protean
src/protean/core/usecase/generic.py
DeleteUseCase.process_request
def process_request(self, request_object): """Process the Delete Resource Request""" # Delete the object by its identifier entity = request_object.entity_cls.get(request_object.identifier) entity.delete() # FIXME Check for return value of `delete()` # We have successfully deleted the object. # Sending a 204 Response code. return ResponseSuccessWithNoContent()
python
def process_request(self, request_object): """Process the Delete Resource Request""" # Delete the object by its identifier entity = request_object.entity_cls.get(request_object.identifier) entity.delete() # FIXME Check for return value of `delete()` # We have successfully deleted the object. # Sending a 204 Response code. return ResponseSuccessWithNoContent()
[ "def", "process_request", "(", "self", ",", "request_object", ")", ":", "# Delete the object by its identifier", "entity", "=", "request_object", ".", "entity_cls", ".", "get", "(", "request_object", ".", "identifier", ")", "entity", ".", "delete", "(", ")", "# FI...
Process the Delete Resource Request
[ "Process", "the", "Delete", "Resource", "Request" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/generic.py#L157-L168
proteanhq/protean
src/protean/core/field/basic.py
String._cast_to_type
def _cast_to_type(self, value): """ Convert the value to its string representation""" if isinstance(value, str) or value is None: return value return str(value)
python
def _cast_to_type(self, value): """ Convert the value to its string representation""" if isinstance(value, str) or value is None: return value return str(value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", "or", "value", "is", "None", ":", "return", "value", "return", "str", "(", "value", ")" ]
Convert the value to its string representation
[ "Convert", "the", "value", "to", "its", "string", "representation" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L32-L36
proteanhq/protean
src/protean/core/field/basic.py
Integer._cast_to_type
def _cast_to_type(self, value): """ Convert the value to an int and raise error on failures""" try: return int(value) except (ValueError, TypeError): self.fail('invalid', value=value)
python
def _cast_to_type(self, value): """ Convert the value to an int and raise error on failures""" try: return int(value) except (ValueError, TypeError): self.fail('invalid', value=value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "try", ":", "return", "int", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "self", ".", "fail", "(", "'invalid'", ",", "value", "=", "value", ")" ]
Convert the value to an int and raise error on failures
[ "Convert", "the", "value", "to", "an", "int", "and", "raise", "error", "on", "failures" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L73-L78
proteanhq/protean
src/protean/core/field/basic.py
Float._cast_to_type
def _cast_to_type(self, value): """ Convert the value to a float and raise error on failures""" try: return float(value) except (ValueError, TypeError): self.fail('invalid', value=value)
python
def _cast_to_type(self, value): """ Convert the value to a float and raise error on failures""" try: return float(value) except (ValueError, TypeError): self.fail('invalid', value=value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "try", ":", "return", "float", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "self", ".", "fail", "(", "'invalid'", ",", "value", "=", "value", ")" ]
Convert the value to a float and raise error on failures
[ "Convert", "the", "value", "to", "a", "float", "and", "raise", "error", "on", "failures" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L101-L106
proteanhq/protean
src/protean/core/field/basic.py
Boolean._cast_to_type
def _cast_to_type(self, value): """ Convert the value to a boolean and raise error on failures""" if value in (True, False): return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False self.fail('invalid', value=value)
python
def _cast_to_type(self, value): """ Convert the value to a boolean and raise error on failures""" if value in (True, False): return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False self.fail('invalid', value=value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "value", "in", "(", "True", ",", "False", ")", ":", "return", "bool", "(", "value", ")", "if", "value", "in", "(", "'t'", ",", "'True'", ",", "'1'", ")", ":", "return", "True", "i...
Convert the value to a boolean and raise error on failures
[ "Convert", "the", "value", "to", "a", "boolean", "and", "raise", "error", "on", "failures" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L116-L124
proteanhq/protean
src/protean/core/field/basic.py
List._cast_to_type
def _cast_to_type(self, value): """ Raise error if the value is not a list """ if not isinstance(value, list): self.fail('invalid', value=value) return value
python
def _cast_to_type(self, value): """ Raise error if the value is not a list """ if not isinstance(value, list): self.fail('invalid', value=value) return value
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "list", ")", ":", "self", ".", "fail", "(", "'invalid'", ",", "value", "=", "value", ")", "return", "value" ]
Raise error if the value is not a list
[ "Raise", "error", "if", "the", "value", "is", "not", "a", "list" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L134-L138
proteanhq/protean
src/protean/core/field/basic.py
Dict._cast_to_type
def _cast_to_type(self, value): """ Raise error if the value is not a dict """ if not isinstance(value, dict): self.fail('invalid', value=value) return value
python
def _cast_to_type(self, value): """ Raise error if the value is not a dict """ if not isinstance(value, dict): self.fail('invalid', value=value) return value
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "self", ".", "fail", "(", "'invalid'", ",", "value", "=", "value", ")", "return", "value" ]
Raise error if the value is not a dict
[ "Raise", "error", "if", "the", "value", "is", "not", "a", "dict" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L148-L152
proteanhq/protean
src/protean/core/field/basic.py
Date._cast_to_type
def _cast_to_type(self, value): """ Convert the value to a date and raise error on failures""" if isinstance(value, datetime.datetime): return value.date() if isinstance(value, datetime.date): return value try: value = date_parser(value) return value.date() except ValueError: self.fail('invalid', value=value)
python
def _cast_to_type(self, value): """ Convert the value to a date and raise error on failures""" if isinstance(value, datetime.datetime): return value.date() if isinstance(value, datetime.date): return value try: value = date_parser(value) return value.date() except ValueError: self.fail('invalid', value=value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "return", "value", ".", "date", "(", ")", "if", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", ...
Convert the value to a date and raise error on failures
[ "Convert", "the", "value", "to", "a", "date", "and", "raise", "error", "on", "failures" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L176-L186
proteanhq/protean
src/protean/core/field/basic.py
DateTime._cast_to_type
def _cast_to_type(self, value): """ Convert the value to a datetime and raise error on failures""" if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) return value try: value = date_parser(value) return value except ValueError: self.fail('invalid', value=value)
python
def _cast_to_type(self, value): """ Convert the value to a datetime and raise error on failures""" if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) return value try: value = date_parser(value) return value except ValueError: self.fail('invalid', value=value)
[ "def", "_cast_to_type", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "return", "value", "if", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", ":", "value", "=", "datet...
Convert the value to a datetime and raise error on failures
[ "Convert", "the", "value", "to", "a", "datetime", "and", "raise", "error", "on", "failures" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/field/basic.py#L193-L204
wasp/waspy
waspy/transports/transportabc.py
ClientTransportABC.make_request
async def make_request(self, service: str, method: str, path: str, body: bytes=None, query: str=None, headers: dict=None, correlation_id: str=None, content_type: str=None, timeout:int = 30, **kwargs) -> webtypes.Response: """ Method for actually making a request :param service: service to make request too :param method: HTTP method: GET/PUT/POST etc. :param path: routing path. Should support dots `foo.2.bars` or slashes `foo/2/bars` :param body: request body. Bytes-like object :param query: query string. Example: `foo=bar&cabbage=green` :param headers: Dictionary of headers :param correlation_id: :param content_type: example: `application/json` :param timeout: time to wait for response in seconds before getting an asyncio.TimeoutError :param kwargs: Should except **kwargs for compatability for other possible options on other transports (for example, http might need a `port` option) :return: """
python
async def make_request(self, service: str, method: str, path: str, body: bytes=None, query: str=None, headers: dict=None, correlation_id: str=None, content_type: str=None, timeout:int = 30, **kwargs) -> webtypes.Response: """ Method for actually making a request :param service: service to make request too :param method: HTTP method: GET/PUT/POST etc. :param path: routing path. Should support dots `foo.2.bars` or slashes `foo/2/bars` :param body: request body. Bytes-like object :param query: query string. Example: `foo=bar&cabbage=green` :param headers: Dictionary of headers :param correlation_id: :param content_type: example: `application/json` :param timeout: time to wait for response in seconds before getting an asyncio.TimeoutError :param kwargs: Should except **kwargs for compatability for other possible options on other transports (for example, http might need a `port` option) :return: """
[ "async", "def", "make_request", "(", "self", ",", "service", ":", "str", ",", "method", ":", "str", ",", "path", ":", "str", ",", "body", ":", "bytes", "=", "None", ",", "query", ":", "str", "=", "None", ",", "headers", ":", "dict", "=", "None", ...
Method for actually making a request :param service: service to make request too :param method: HTTP method: GET/PUT/POST etc. :param path: routing path. Should support dots `foo.2.bars` or slashes `foo/2/bars` :param body: request body. Bytes-like object :param query: query string. Example: `foo=bar&cabbage=green` :param headers: Dictionary of headers :param correlation_id: :param content_type: example: `application/json` :param timeout: time to wait for response in seconds before getting an asyncio.TimeoutError :param kwargs: Should except **kwargs for compatability for other possible options on other transports (for example, http might need a `port` option) :return:
[ "Method", "for", "actually", "making", "a", "request", ":", "param", "service", ":", "service", "to", "make", "request", "too", ":", "param", "method", ":", "HTTP", "method", ":", "GET", "/", "PUT", "/", "POST", "etc", ".", ":", "param", "path", ":", ...
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/transports/transportabc.py#L11-L34
proteanhq/protean
src/protean/core/usecase/base.py
UseCase.execute
def execute(self, request_object): """Generic executor method of all UseCases""" # If the request object is not valid then return a failure response if not request_object.is_valid: return ResponseFailure.build_from_invalid_request( request_object) # Try to process the request and handle any errors encountered try: return self.process_request(request_object) except ValidationError as err: return ResponseFailure.build_unprocessable_error(err.normalized_messages) except ObjectNotFoundError: return ResponseFailure.build_not_found( [{'identifier': 'Object with this ID does not exist.'}]) except Exception as exc: logger.error( f'{self.__class__.__name__} execution failed due to error {exc}', exc_info=True) return ResponseFailure.build_system_error([{exc.__class__.__name__: exc}])
python
def execute(self, request_object): """Generic executor method of all UseCases""" # If the request object is not valid then return a failure response if not request_object.is_valid: return ResponseFailure.build_from_invalid_request( request_object) # Try to process the request and handle any errors encountered try: return self.process_request(request_object) except ValidationError as err: return ResponseFailure.build_unprocessable_error(err.normalized_messages) except ObjectNotFoundError: return ResponseFailure.build_not_found( [{'identifier': 'Object with this ID does not exist.'}]) except Exception as exc: logger.error( f'{self.__class__.__name__} execution failed due to error {exc}', exc_info=True) return ResponseFailure.build_system_error([{exc.__class__.__name__: exc}])
[ "def", "execute", "(", "self", ",", "request_object", ")", ":", "# If the request object is not valid then return a failure response", "if", "not", "request_object", ".", "is_valid", ":", "return", "ResponseFailure", ".", "build_from_invalid_request", "(", "request_object", ...
Generic executor method of all UseCases
[ "Generic", "executor", "method", "of", "all", "UseCases" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/core/usecase/base.py#L17-L40
deep-compute/deeputil
deeputil/timer.py
FunctionTimer
def FunctionTimer(on_done=None): ''' To check execution time of a function borrowed from https://medium.com/pythonhive/python-decorator-to-measure-the-execution-time-of-methods-fa04cb6bb36d >>> def logger(details, args, kwargs): #some function that uses the time output ... print(details) ... >>> @FunctionTimer(on_done= logger) ... def foo(t=10): ... print('foo executing...') ... time.sleep(t) ... >>> @FunctionTimer(on_done= logger) ... def bar(t, n): ... for i in range(n): ... print('bar executing...') ... time.sleep(1) ... foo(t) ... >>> bar(3,2) bar executing... bar executing... foo executing... ('foo', 3) ('bar', 5) ''' def decfn(fn): def timed(*args, **kwargs): ts = time.time() result = fn(*args, **kwargs) te = time.time() if on_done: on_done((fn.__name__,int(te - ts)), args, kwargs) else: print(('%r %d sec(s)' % (fn.__name__, (te - ts)))) return result return timed return decfn
python
def FunctionTimer(on_done=None): ''' To check execution time of a function borrowed from https://medium.com/pythonhive/python-decorator-to-measure-the-execution-time-of-methods-fa04cb6bb36d >>> def logger(details, args, kwargs): #some function that uses the time output ... print(details) ... >>> @FunctionTimer(on_done= logger) ... def foo(t=10): ... print('foo executing...') ... time.sleep(t) ... >>> @FunctionTimer(on_done= logger) ... def bar(t, n): ... for i in range(n): ... print('bar executing...') ... time.sleep(1) ... foo(t) ... >>> bar(3,2) bar executing... bar executing... foo executing... ('foo', 3) ('bar', 5) ''' def decfn(fn): def timed(*args, **kwargs): ts = time.time() result = fn(*args, **kwargs) te = time.time() if on_done: on_done((fn.__name__,int(te - ts)), args, kwargs) else: print(('%r %d sec(s)' % (fn.__name__, (te - ts)))) return result return timed return decfn
[ "def", "FunctionTimer", "(", "on_done", "=", "None", ")", ":", "def", "decfn", "(", "fn", ")", ":", "def", "timed", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ts", "=", "time", ".", "time", "(", ")", "result", "=", "fn", "(", "*", ...
To check execution time of a function borrowed from https://medium.com/pythonhive/python-decorator-to-measure-the-execution-time-of-methods-fa04cb6bb36d >>> def logger(details, args, kwargs): #some function that uses the time output ... print(details) ... >>> @FunctionTimer(on_done= logger) ... def foo(t=10): ... print('foo executing...') ... time.sleep(t) ... >>> @FunctionTimer(on_done= logger) ... def bar(t, n): ... for i in range(n): ... print('bar executing...') ... time.sleep(1) ... foo(t) ... >>> bar(3,2) bar executing... bar executing... foo executing... ('foo', 3) ('bar', 5)
[ "To", "check", "execution", "time", "of", "a", "function", "borrowed", "from", "https", ":", "//", "medium", ".", "com", "/", "pythonhive", "/", "python", "-", "decorator", "-", "to", "-", "measure", "-", "the", "-", "execution", "-", "time", "-", "of"...
train
https://github.com/deep-compute/deeputil/blob/9af5702bc3fd990688bf2aed16c20fa104be66df/deeputil/timer.py#L5-L47
wasp/waspy
waspy/listeners/rabbitmq_listener.py
RabbitMQTransportListener.exchange_declare
async def exchange_declare(self): """ Override this method to change how a exchange is declared """ await self.channel.exchange_declare( self.exchange, self.exchange_type, durable=self.durable, auto_delete=self.auto_delete, no_wait=self.no_wait, )
python
async def exchange_declare(self): """ Override this method to change how a exchange is declared """ await self.channel.exchange_declare( self.exchange, self.exchange_type, durable=self.durable, auto_delete=self.auto_delete, no_wait=self.no_wait, )
[ "async", "def", "exchange_declare", "(", "self", ")", ":", "await", "self", ".", "channel", ".", "exchange_declare", "(", "self", ".", "exchange", ",", "self", ".", "exchange_type", ",", "durable", "=", "self", ".", "durable", ",", "auto_delete", "=", "sel...
Override this method to change how a exchange is declared
[ "Override", "this", "method", "to", "change", "how", "a", "exchange", "is", "declared" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/listeners/rabbitmq_listener.py#L77-L85
wasp/waspy
waspy/listeners/rabbitmq_listener.py
RabbitMQTransportListener.queue_declare
async def queue_declare(self): """ Override this method to change how a queue is declared """ await self.channel.queue_declare( self.queue, durable=self.durable, exclusive=self.exclusive, no_wait=self.no_wait )
python
async def queue_declare(self): """ Override this method to change how a queue is declared """ await self.channel.queue_declare( self.queue, durable=self.durable, exclusive=self.exclusive, no_wait=self.no_wait )
[ "async", "def", "queue_declare", "(", "self", ")", ":", "await", "self", ".", "channel", ".", "queue_declare", "(", "self", ".", "queue", ",", "durable", "=", "self", ".", "durable", ",", "exclusive", "=", "self", ".", "exclusive", ",", "no_wait", "=", ...
Override this method to change how a queue is declared
[ "Override", "this", "method", "to", "change", "how", "a", "queue", "is", "declared" ]
train
https://github.com/wasp/waspy/blob/31cc352f300a089f9607d7f13d93591d4c69d5ec/waspy/listeners/rabbitmq_listener.py#L87-L94
proteanhq/protean
src/protean/impl/repository/dict_repo.py
DictModel.from_entity
def from_entity(cls, entity: Entity) -> 'DictModel': """Convert the entity to a dictionary record """ dict_obj = {} for field_name in entity.meta_.attributes: dict_obj[field_name] = getattr(entity, field_name) return dict_obj
python
def from_entity(cls, entity: Entity) -> 'DictModel': """Convert the entity to a dictionary record """ dict_obj = {} for field_name in entity.meta_.attributes: dict_obj[field_name] = getattr(entity, field_name) return dict_obj
[ "def", "from_entity", "(", "cls", ",", "entity", ":", "Entity", ")", "->", "'DictModel'", ":", "dict_obj", "=", "{", "}", "for", "field_name", "in", "entity", ".", "meta_", ".", "attributes", ":", "dict_obj", "[", "field_name", "]", "=", "getattr", "(", ...
Convert the entity to a dictionary record
[ "Convert", "the", "entity", "to", "a", "dictionary", "record" ]
train
https://github.com/proteanhq/protean/blob/0e29873f4aa634aa93cc08ed675dd749c7ed4b0f/src/protean/impl/repository/dict_repo.py#L30-L35