Search is not available for this dataset
text stringlengths 75 104k |
|---|
def name(self):
""" The instruction name/mnemonic """
if self._name == 'PUSH':
return 'PUSH%d' % self.operand_size
elif self._name == 'DUP':
return 'DUP%d' % self.pops
elif self._name == 'SWAP':
return 'SWAP%d' % (self.pops - 1)
elif self._name... |
def parse_operand(self, buf):
""" Parses an operand from buf
:param buf: a buffer
:type buf: iterator/generator/string
"""
buf = iter(buf)
try:
operand = 0
for _ in range(self.operand_size):
operand <<= 8
op... |
def bytes(self):
    """Return the encoded instruction: the opcode byte followed by the
    operand serialized big-endian in ``operand_size`` bytes.

    NOTE: the original implementation called the builtin ``bytes([...])``
    from inside a method named ``bytes`` — that only works because a method
    name does not create an enclosing scope, and breaks the moment the
    function is moved to module level.  ``int.to_bytes`` avoids relying on
    the shadowed builtin and serializes the operand in one call.
    """
    opcode = self._opcode.to_bytes(1, 'big')
    if not self.operand_size:
        # no operand bytes to emit (operand may legitimately be unset here)
        return opcode
    # Masking to operand_size bytes reproduces the original per-byte
    # `(operand >> off*8) & 0xff` extraction, including for negative ints.
    mask = (1 << (8 * self.operand_size)) - 1
    return opcode + (self.operand & mask).to_bytes(self.operand_size, 'big')
def reverse(rev_inputs=REV_INPUTS):
"""
reverse the key, value in dictionary file
:param rev_inputs: the files to be reversed
:return: None
"""
dirname = os.path.dirname(__file__)
for in_file in rev_inputs:
reversed_dict = {}
input_file = in_file + '.txt'
output_file... |
def merge(mer_inputs=MER_INPUTS, mer_output=MER_OUTPUT):
"""
merge the phrase files into one file
:param mer_inputs: the phrase files
:param mer_output: the output file
:return: None
"""
dirname = os.path.dirname(__file__)
output_file = os.path.join(dirname, DICT_DIRECTORY, mer_output)
... |
def _adjust_delay(self, slot, response):
"""Define delay adjustment policy"""
if response.status in self.retry_http_codes:
new_delay = max(slot.delay, 1) * 4
new_delay = max(new_delay, self.mindelay)
new_delay = min(new_delay, self.maxdelay)
slot.delay = n... |
def memberness(context):
'''The likelihood that the context is a "member".'''
if context:
texts = context.xpath('.//*[local-name()="explicitMember"]/text()').extract()
text = str(texts).lower()
if len(texts) > 1:
return 2
elif 'country' in text:
return 2
... |
def parse_10qk(self, response):
'''Parse 10-Q or 10-K XML report.'''
loader = ReportItemLoader(response=response)
item = loader.load_item()
if 'doc_type' in item:
doc_type = item['doc_type']
if doc_type in ('10-Q', '10-K'):
return item
re... |
def camelcase(string):
""" Convert string into camel case.
Args:
string: String to convert.
Returns:
string: Camel case string.
"""
string = re.sub(r"^[\-_\.]", '', str(string))
if not string:
return string
return lowercase(string[0]) + re.sub(r"[\-_\.\s]([a-z])",... |
def capitalcase(string):
    """Convert string into capital case.

    Only the first letter is uppercased; the remainder is left untouched.

    Args:
        string: String to convert.
    Returns:
        string: Capital case string.
    """
    text = str(string)
    if not text:
        return text
    return uppercase(text[0]) + text[1:]
def pathcase(string):
    """Convert string into path case.

    The string is first snake-cased, then underscores become slashes.

    Args:
        string: String to convert.
    Returns:
        string: Path cased string.
    """
    snaked = snakecase(string)
    if not snaked:
        return snaked
    return snaked.replace("_", "/")
def backslashcase(string):
    """Convert string into backslash case.

    The string is first snake-cased, then each underscore is replaced by a
    single backslash.

    Args:
        string: String to convert.
    Returns:
        string: Backslash cased string.
    """
    return snakecase(string).replace("_", "\\")
def sentencecase(string):
"""Convert string into sentence case.
First letter capped and each punctuations are joined with space.
Args:
string: String to convert.
Returns:
string: Sentence cased string.
"""
joiner = ' '
string = re.sub(r"[\-_\.\s]", joiner, str(string))
... |
def snakecase(string):
"""Convert string into snake case.
Join punctuation with underscore
Args:
string: String to convert.
Returns:
string: Snake cased string.
"""
string = re.sub(r"[\-\.\s]", '_', str(string))
if not string:
return string
return lowercase(st... |
def _check_input(self, input):
"""Checks the validity of the input.
In case of an invalid input throws ValueError.
"""
if isinstance(input, str):
return 'st'
elif isinstance(input, list):
if all(isinstance(item, str) for item in input):
re... |
def build(self, x):
"""Builds the Suffix tree on the given input.
If the input is of type List of Strings:
Generalized Suffix Tree is built.
:param x: String or List of Strings
"""
type = self._check_input(x)
if type == 'st':
x += next(self._terminal... |
def _build_McCreight(self, x):
"""Builds a Suffix tree using McCreight O(n) algorithm.
Algorithm based on:
McCreight, Edward M. "A space-economical suffix tree construction algorithm." - ACM, 1976.
Implementation based on:
UH CS - 58093 String Processing Algorithms Lecture Notes... |
def _build_generalized(self, xs):
"""Builds a Generalized Suffix Tree (GST) from the array of strings provided.
"""
terminal_gen = self._terminalSymbolsGenerator()
_xs = ''.join([x + next(terminal_gen) for x in xs])
self.word = _xs
self._generalized_word_starts(xs)
... |
def _label_generalized(self, node):
"""Helper method that labels the nodes of GST with indexes of strings
found in their descendants.
"""
if node.is_leaf():
x = {self._get_word_start_index(node.idx)}
else:
x = {n for ns in node.transition_links for n in ns... |
def _get_word_start_index(self, idx):
"""Helper method that returns the index of the string based on node's
starting index"""
i = 0
for _idx in self.word_starts[1:]:
if idx < _idx:
return i
else:
i+=1
return i |
def lcs(self, stringIdxs=-1):
"""Returns the Largest Common Substring of Strings provided in stringIdxs.
If stringIdxs is not provided, the LCS of all strings is returned.
::param stringIdxs: Optional: List of indexes of strings.
"""
if stringIdxs == -1 or not isinstance(stringI... |
def _find_lcs(self, node, stringIdxs):
"""Helper method that finds LCS by traversing the labeled GSD."""
nodes = [self._find_lcs(n, stringIdxs)
for (n,_) in node.transition_links
if n.generalized_idxs.issuperset(stringIdxs)]
if nodes == []:
return node
... |
def _generalized_word_starts(self, xs):
"""Helper method returns the starting indexes of strings in GST"""
self.word_starts = []
i = 0
for n in range(len(xs)):
self.word_starts.append(i)
i += len(xs[n]) + 1 |
def find(self, y):
"""Returns starting position of the substring y in the string used for
building the Suffix tree.
:param y: String
:return: Index of the starting position of string y in the string used for building the Suffix tree
-1 if y is not a substring.
"... |
def _edgeLabel(self, node, parent):
"""Helper method, returns the edge label between a node and it's parent"""
return self.word[node.idx + parent.depth : node.idx + node.depth] |
def _terminalSymbolsGenerator(self):
"""Generator of unique terminal symbols used for building the Generalized Suffix Tree.
Unicode Private Use Area U+E000..U+F8FF is used to ensure that terminal symbols
are not part of the input string.
"""
py2 = sys.version[0] < '3'
UPP... |
def _dist(self, x, y, A):
"(x - y)^T A (x - y)"
return scipy.spatial.distance.mahalanobis(x, y, A) ** 2 |
def query(self, i, j):
    """Ask the oracle whether samples ``i`` and ``j`` should be must-linked.

    Raises MaximumQueriesExceeded once the query budget is spent.
    """
    if self.queries_cnt >= self.max_queries_cnt:
        raise MaximumQueriesExceeded
    self.queries_cnt += 1
    return self.labels[i] == self.labels[j]
def preprocess_constraints(ml, cl, n):
"Create a graph of constraints for both must- and cannot-links"
# Represent the graphs using adjacency-lists
ml_graph, cl_graph = {}, {}
for i in range(n):
ml_graph[i] = set()
cl_graph[i] = set()
def add_both(d, i, j):
d[i].add(j)
... |
def make_pmml_pipeline(obj, active_fields = None, target_fields = None):
"""Translates a regular Scikit-Learn estimator or pipeline to a PMML pipeline.
Parameters:
----------
obj: BaseEstimator
The object.
active_fields: list of strings, optional
Feature names. If missing, "x1", "x2", .., "xn" are assumed.
... |
def sklearn2pmml(pipeline, pmml, user_classpath = [], with_repr = False, debug = False, java_encoding = "UTF-8"):
"""Converts a fitted Scikit-Learn pipeline to PMML.
Parameters:
----------
pipeline: PMMLPipeline
The pipeline.
pmml: string
The path to where the PMML document should be stored.
user_classpath... |
def make_tpot_pmml_config(config, user_classpath = []):
"""Translates a regular TPOT configuration to a PMML-compatible TPOT configuration.
Parameters:
----------
obj: config
The configuration dictionary.
user_classpath: list of strings, optional
The paths to JAR files that provide custom Transformer, Select... |
def construct_formset(self):
"""
Returns an instance of the formset
"""
formset_class = self.get_formset()
if hasattr(self, 'get_extra_form_kwargs'):
klass = type(self).__name__
raise DeprecationWarning(
'Calling {0}.get_extra_form_kwargs i... |
def get_formset_kwargs(self):
"""
Returns the keyword arguments for instantiating the formset.
"""
kwargs = self.formset_kwargs.copy()
kwargs.update({
'initial': self.get_initial(),
'prefix': self.get_prefix(),
})
if self.request.method in... |
def get_factory_kwargs(self):
"""
Returns the keyword arguments for calling the formset factory
"""
# Perform deprecation check
for attr in ['extra', 'max_num', 'can_order', 'can_delete', 'ct_field',
'formfield_callback', 'fk_name', 'widgets', 'ct_fk_field']:... |
def get_success_url(self):
    """Return the URL to redirect to after success.

    Falls back to the current page when no ``success_url`` is configured.
    """
    if not self.success_url:
        # Default to returning to the same page
        return self.request.get_full_path()
    return self.success_url
def get_formset_kwargs(self):
    """
    Return the keyword arguments for instantiating the formset,
    extending the parent kwargs with this view's queryset.
    """
    formset_kwargs = super(ModelFormSetMixin, self).get_formset_kwargs()
    formset_kwargs['queryset'] = self.get_queryset()
    return formset_kwargs
def formset_valid(self, formset):
    """
    Persist the formset's model instances, then delegate to the parent handler.
    """
    saved_objects = formset.save()
    self.object_list = saved_objects
    return super(ModelFormSetMixin, self).formset_valid(formset)
def get_formset_kwargs(self):
"""
Returns the keyword arguments for instantiating the formset.
"""
# Perform deprecation check
if hasattr(self, 'save_as_new'):
klass = type(self).__name__
raise DeprecationWarning(
'Setting `{0}.save_as_new`... |
def get_factory_kwargs(self):
"""
Returns the keyword arguments for calling the formset factory
"""
kwargs = super(BaseInlineFormSetFactory, self).get_factory_kwargs()
kwargs.setdefault('fields', self.fields)
kwargs.setdefault('exclude', self.exclude)
if self.get... |
def get(self, request, *args, **kwargs):
    """
    Handle GET requests by rendering a blank formset.
    """
    context = self.get_context_data(formset=self.construct_formset())
    return self.render_to_response(context)
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a formset instance with the passed
POST variables and then checked for validity.
"""
formset = self.construct_formset()
if formset.is_valid():
return self.formset_valid(formset)... |
def construct_formset(self):
    """
    Build the formset via the parent factory, then tag the returned
    instance with the inline model class so templates/callers can use it.
    """
    inline_formset = super(InlineFormSetFactory, self).construct_formset()
    inline_formset.model = self.inline_model
    return inline_formset
def forms_valid(self, form, inlines):
    """
    Persist the main form first, then each inline formset, and return
    the main form's success response.
    """
    response = self.form_valid(form)
    for inline_formset in inlines:
        inline_formset.save()
    return response
def forms_invalid(self, form, inlines):
    """
    Re-render the page with the bound form, its formsets and their errors.
    """
    context = self.get_context_data(form=form, inlines=inlines)
    return self.render_to_response(context)
def construct_inlines(self):
"""
Returns the inline formset instances
"""
inline_formsets = []
for inline_class in self.get_inlines():
inline_instance = inline_class(self.model, self.request, self.object, self.kwargs, self)
inline_formset = inline_instance... |
def get(self, request, *args, **kwargs):
"""
Handles GET requests and instantiates a blank version of the form and formsets.
"""
form_class = self.get_form_class()
form = self.get_form(form_class)
inlines = self.construct_inlines()
return self.render_to_response(s... |
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a form and formset instances with the passed
POST variables and then checked for validity.
"""
form_class = self.get_form_class()
form = self.get_form(form_class)
if form.is_valid(... |
def get_context_data(self, **kwargs):
"""
If `inlines_names` has been defined, add each formset to the context under
its corresponding entry in `inlines_names`
"""
context = {}
inlines_names = self.get_inlines_names()
if inlines_names:
# We have forms... |
def try_convert_to_date(self, word):
"""
Tries to convert word to date(datetime) using search_date_formats
Return None if word fits no one format
"""
for frm in self.search_date_formats:
try:
return datetime.datetime.strptime(word, frm).date()
... |
def get_params_for_field(self, field_name, sort_type=None):
"""
If sort_type is None - inverse current sort for field, if no sorted - use asc
"""
if not sort_type:
if self.initial_sort == field_name:
sort_type = 'desc' if self.initial_sort_type == 'asc' else '... |
def get_start_date(self, obj):
"""
Returns the start date for a model instance
"""
obj_date = getattr(obj, self.get_date_field())
try:
obj_date = obj_date.date()
except AttributeError:
# It's a date rather than datetime, so we use it as is
... |
def get_end_date(self, obj):
"""
Returns the end date for a model instance
"""
obj_date = getattr(obj, self.get_end_date_field())
try:
obj_date = obj_date.date()
except AttributeError:
# It's a date rather than datetime, so we use it as is
... |
def get_first_of_week(self):
"""
Returns an integer representing the first day of the week.
0 represents Monday, 6 represents Sunday.
"""
if self.first_of_week is None:
raise ImproperlyConfigured("%s.first_of_week is required." % self.__class__.__name__)
if s... |
def get_queryset(self):
"""
Returns a queryset of models for the month requested
"""
qs = super(BaseCalendarMonthView, self).get_queryset()
year = self.get_year()
month = self.get_month()
date_field = self.get_date_field()
end_date_field = self.get_end_d... |
def get_context_data(self, **kwargs):
"""
Injects variables necessary for rendering the calendar into the context.
Variables added are: `calendar`, `weekdays`, `month`, `next_month` and `previous_month`.
"""
data = super(BaseCalendarMonthView, self).get_context_data(**kwargs)
... |
def read_version():
"""Read version from __init__.py without loading any files"""
finder = VersionFinder()
path = os.path.join(PROJECT_ROOT, 'colorful', '__init__.py')
with codecs.open(path, 'r', encoding='utf-8') as fp:
file_data = fp.read().encode('utf-8')
finder.visit(ast.parse(file_d... |
def with_setup(self, colormode=None, colorpalette=None, extend_colors=False):
"""
Return a new Colorful object with the given color config.
"""
colorful = Colorful(
colormode=self.colorful.colormode,
colorpalette=copy.copy(self.colorful.colorpalette)
)
... |
def parse_colors(path):
"""Parse the given color files.
Supported are:
* .txt for X11 colors
* .json for colornames
"""
if path.endswith(".txt"):
return parse_rgb_txt_file(path)
elif path.endswith(".json"):
return parse_json_color_file(path)
raise TypeError("col... |
def parse_rgb_txt_file(path):
"""
Parse the given rgb.txt file into a Python dict.
See https://en.wikipedia.org/wiki/X11_color_names for more information
:param str path: the path to the X11 rgb.txt file
"""
#: Holds the generated color dict
color_dict = {}
with open(path, 'r') as rgb... |
def parse_json_color_file(path):
"""Parse a JSON color file.
The JSON has to be in the following format:
.. code:: json
[{"name": "COLOR_NAME", "hex": "#HEX"}, ...]
:param str path: the path to the JSON color file
"""
with open(path, "r") as color_file:
color_list = json.load(... |
def sanitize_color_palette(colorpalette):
"""
Sanitze the given color palette so it can
be safely used by Colorful.
It will convert colors specified in hex RGB to
a RGB channel triplet.
"""
new_palette = {}
def __make_valid_color_name(name):
"""
Convert the given name i... |
def show():
"""
Show the modifiers and colors
"""
# modifiers
sys.stdout.write(colorful.bold('bold') + ' ')
sys.stdout.write(colorful.dimmed('dimmed') + ' ')
sys.stdout.write(colorful.italic('italic') + ' ')
sys.stdout.write(colorful.underlined('underlined') + ' ')
sys.stdout.write(c... |
def show():
"""
Show the modifiers and colors
"""
with colorful.with_style('monokai') as c:
# modifiers
sys.stdout.write(c.bold('bold') + ' ')
sys.stdout.write(c.dimmed('dimmed') + ' ')
sys.stdout.write(c.italic('italic') + ' ')
sys.stdout.write(c.underlined('unde... |
def detect_color_support(env): # noqa
"""
Detect what color palettes are supported.
It'll return a valid color mode to use
with colorful.
:param dict env: the environment dict like returned by ``os.envion``
"""
if env.get('COLORFUL_DISABLE', '0') == '1':
return NO_COLORS
if en... |
def rgb_to_ansi256(r, g, b):
"""
Convert RGB to ANSI 256 color
"""
if r == g and g == b:
if r < 8:
return 16
if r > 248:
return 231
return round(((r - 8) / 247.0) * 24) + 232
ansi_r = 36 * round(r / 255.0 * 5.0)
ansi_g = 6 * round(g / 255.0 * 5.0... |
def rgb_to_ansi16(r, g, b, use_bright=False):
    """
    Convert an RGB triplet to the nearest ANSI 16 color code.

    Each channel is rounded to one bit and packed as BGR; the base is 30
    for normal colors, 90 for the bright variants.
    """
    bits = (round(b / 255.0) << 2) | (round(g / 255.0) << 1) | round(r / 255.0)
    base = 90 if use_bright else 30
    return base + bits
def hex_to_rgb(value):
    """
    Convert a hex color string (3 or 6 digits, optional leading '#')
    into an (r, g, b) channel triplet.
    """
    digits = value.lstrip('#')
    check_hex(digits)
    step = len(digits) // 3
    channels = [int(digits[i:i + step], 16) for i in range(0, len(digits), step)]
    return tuple(channels)
def check_hex(value):
"""
Check if the given hex value is a valid RGB color
It should match the format: [0-9a-fA-F]
and be of length 3 or 6.
"""
length = len(value)
if length not in (3, 6):
raise ValueError('Hex string #{} is too long'.format(value))
regex = r'[0-9a-f]{{{length... |
def show():
"""
Show the modifiers and colors
"""
with colorful.with_style('solarized') as c:
# modifiers
sys.stdout.write(c.bold('bold') + ' ')
sys.stdout.write(c.dimmed('dimmed') + ' ')
sys.stdout.write(c.italic('italic') + ' ')
sys.stdout.write(c.underlined('un... |
def translate_rgb_to_ansi_code(red, green, blue, offset, colormode):
"""
Translate the given RGB color into the appropriate ANSI escape code
for the given color mode.
The offset is used for the base color which is used.
The ``colormode`` has to be one of:
* 0: no colors / disabled
*... |
def translate_colorname_to_ansi_code(colorname, offset, colormode, colorpalette):
"""
Translate the given color name to a valid
ANSI escape code.
:parma str colorname: the name of the color to resolve
:parma str offset: the offset for the color code
:param int colormode: the color mode to use. ... |
def resolve_modifier_to_ansi_code(modifiername, colormode):
"""
Resolve the given modifier name to a valid
ANSI escape code.
:param str modifiername: the name of the modifier to resolve
:param int colormode: the color mode to use. See ``translate_rgb_to_ansi_code``
:returns str: the ANSI escap... |
def translate_style(style, colormode, colorpalette):
"""
Translate the given style to an ANSI escape code
sequence.
``style`` examples are:
* green
* bold
* red_on_black
* bold_green
* italic_yellow_on_cyan
:param str style: the style to translate
:param int colormode: the... |
def style_string(string, ansi_style, colormode, nested=False):
"""
Style the given string according to the given
ANSI style string.
:param str string: the string to style
:param tuple ansi_style: the styling string returned by ``translate_style``
:param int colormode: the color mode to use. See... |
def colorpalette(self, colorpalette):
"""
Set the colorpalette which should be used
"""
if isinstance(colorpalette, str): # we assume it's a path to a color file
colorpalette = colors.parse_colors(colorpalette)
self._colorpalette = colors.sanitize_color_palette(colo... |
def setup(self, colormode=None, colorpalette=None, extend_colors=False):
"""
Setup this colorful object by setting a ``colormode`` and
the ``colorpalette`. The ``extend_colors`` flag is used
to extend the currently active color palette instead of
replacing it.
:param int... |
def use_style(self, style_name):
"""
Use a predefined style as color palette
:param str style_name: the name of the style
"""
try:
style = getattr(styles, style_name.upper())
except AttributeError:
raise ColorfulError('the style "{0}" is undefined... |
def format(self, string, *args, **kwargs):
"""
Format the given string with the given ``args`` and ``kwargs``.
The string can contain references to ``c`` which is provided by
this colorful object.
:param str string: the string to format
"""
return string.format(c... |
def print(self, *objects, **options):
"""
Print the given objects to the given file stream.
See https://docs.python.org/3/library/functions.html#print
The only difference to the ``print()`` built-in is that
``Colorful.print()`` formats the string with ``c=self``.
With th... |
def readattr(path, name):
    """
    Read a sysfs attribute and return its first line as a string
    (trailing newline stripped), or None if it cannot be read.

    The original version leaked the file handle on success and on
    readline() failure; the ``with`` block guarantees it is closed.
    """
    try:
        with open(USB_SYS_PREFIX + path + "/" + name) as f:
            return f.readline().rstrip("\n")
    except IOError:
        return None
def find_ports(device):
"""
Find the port chain a device is plugged on.
This is done by searching sysfs for a device that matches the device
bus/address combination.
Useful when the underlying usb lib does not return device.port_number for
whatever reason.
"""
bus_id = device.bus
d... |
def set_calibration_data(self, scale=None, offset=None):
"""
Set device calibration data based on settings in /etc/temper.conf.
"""
if scale is not None and offset is not None:
self._scale = scale
self._offset = offset
elif scale is None and offset is None... |
def get_data(self, reset_device=False):
"""
Get data from the USB device.
"""
try:
if reset_device:
self._device.reset()
# detach kernel driver from both interfaces if attached, so we can set_configuration()
for interface in [0,1]:
... |
def get_temperature(self, format='celsius', sensor=0):
"""
Get device temperature reading.
"""
results = self.get_temperatures(sensors=[sensor,])
if format == 'celsius':
return results[sensor]['temperature_c']
elif format == 'fahrenheit':
return r... |
def get_temperatures(self, sensors=None):
"""
Get device temperature reading.
Params:
- sensors: optional list of sensors to get a reading for, examples:
[0,] - get reading for sensor 0
[0, 1,] - get reading for sensors 0 and 1
None - get readings for all s... |
def get_humidity(self, sensors=None):
"""
Get device humidity reading.
Params:
- sensors: optional list of sensors to get a reading for, examples:
[0,] - get reading for sensor 0
[0, 1,] - get reading for sensors 0 and 1
None - get readings for all sensors
... |
def _control_transfer(self, data):
"""
Send device a control request with standard parameters and <data> as
payload.
"""
LOGGER.debug('Ctrl transfer: %r', data)
self._device.ctrl_transfer(bmRequestType=0x21, bRequest=0x09,
wValue=0x0200, wIndex=0x01, data_or_w... |
def _interrupt_read(self):
    """
    Perform an interrupt-in transfer on the device and return the raw data.
    """
    payload = self._device.read(ENDPOINT, REQ_INT_LEN, timeout=TIMEOUT)
    LOGGER.debug('Read data: %r', payload)
    return payload
def __check_looks_like_uri(self, uri):
"""Checks the URI looks like a RAW uri in github:
- 'https://raw.githubusercontent.com/github/hubot/master/README.md'
- 'https://github.com/github/hubot/raw/master/README.md'
:param uri: uri of the file
"""
if uri.split('/')[2] == ... |
def read_file_from_uri(self, uri):
"""Reads the file from Github
:param uri: URI of the Github raw File
:returns: UTF-8 text with the content
"""
logger.debug("Reading %s" % (uri))
self.__check_looks_like_uri(uri)
try:
req = urllib.request.Request(... |
def measure_memory(cls, obj, seen=None):
"""Recursively finds size of objects"""
size = sys.getsizeof(obj)
if seen is None:
seen = set()
obj_id = id(obj)
if obj_id in seen:
return 0
# Important mark as seen *before* entering recursion to gracefully... |
def __feed_arthur(self):
""" Feed Ocean with backend data collected from arthur redis queue"""
with self.ARTHUR_FEED_LOCK:
# This is a expensive operation so don't do it always
if (time.time() - self.ARTHUR_LAST_MEMORY_CHECK) > 5 * self.ARTHUR_LAST_MEMORY_CHECK_TIME:
... |
def __feed_backend_arthur(self, repo):
""" Feed Ocean with backend data collected from arthur redis queue"""
# Always get pending items from arthur for all data sources
self.__feed_arthur()
tag = self.backend_tag(repo)
logger.debug("Arthur items available for %s", self.arthur_... |
def __create_arthur_json(self, repo, backend_args):
""" Create the JSON for configuring arthur to collect data
https://github.com/grimoirelab/arthur#adding-tasks
Sample for git:
{
"tasks": [
{
"task_id": "arthur.git",
"backend": "git"... |
def sha_github_file(cls, config, repo_file, repository_api, repository_branch):
""" Return the GitHub SHA for a file in the repository """
repo_file_sha = None
cfg = config.get_conf()
github_token = cfg['sortinghat']['identities_api_token']
headers = {"Authorization": "token " ... |
def __get_uuids_from_profile_name(self, profile_name):
""" Get the uuid for a profile name """
uuids = []
with self.db.connect() as session:
query = session.query(Profile).\
filter(Profile.name == profile_name)
profiles = query.all()
if profil... |
def micro_mordred(cfg_path, backend_sections, raw, arthur, identities, enrich, panels):
"""Execute the raw and/or the enrich phases of a given backend section defined in a Mordred configuration file.
:param cfg_path: the path of a Mordred configuration file
:param backend_sections: the backend sections whe... |
def get_raw(config, backend_section, arthur):
"""Execute the raw phase for a given backend section, optionally using Arthur
:param config: a Mordred config object
:param backend_section: the backend section where the raw phase is executed
:param arthur: if true, it enables Arthur to collect the raw dat... |
def get_identities(config):
    """Execute the merge identities phase.

    :param config: a Mordred config object
    """
    projects_task = TaskProjects(config)
    projects_task.execute()
    TaskIdentitiesMerge(config).execute()
    logging.info("Merging identities finished!")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.