repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
playpauseandstop/rororo
rororo/settings.py
immutable_settings
def immutable_settings(defaults: Settings, **optionals: Any) -> types.MappingProxyType:
    r"""Build and return a read-only settings mapping.

    Settings are collected from ``defaults`` (a module or dict-like
    instance), then overlaid with any ``\*\*optionals``, and finally wrapped
    in a :class:`types.MappingProxyType` so nobody can mutate them after
    initialization. This keeps configuration explicit and guards against
    hidden settings changes elsewhere in the application.

    :param defaults: Read settings values from module or dict-like instance.
    :param \*\*optionals: Update base settings with optional values.

    Usually no additional values need to be passed, but with application
    factories it makes sense::

        from . import settings

        def create_app(**options):
            app = ...
            app.settings = immutable_settings(settings, **options)
            return app

    Each additional key overwrites the corresponding default value.
    """
    merged = dict(iter_settings(defaults))
    merged.update(iter_settings(optionals))
    return types.MappingProxyType(merged)
python
def immutable_settings(defaults: Settings, **optionals: Any) -> types.MappingProxyType: r"""Initialize and return immutable Settings dictionary. Settings dictionary allows you to setup settings values from multiple sources and make sure that values cannot be changed, updated by anyone else after initialization. This helps keep things clear and not worry about hidden settings change somewhere around your web application. :param defaults: Read settings values from module or dict-like instance. :param \*\*optionals: Update base settings with optional values. In common additional values shouldn't be passed, if settings values already populated from local settings or environment. But in case of using application factories this makes sense:: from . import settings def create_app(**options): app = ... app.settings = immutable_settings(settings, **options) return app And yes each additional key overwrite default setting value. """ settings = {key: value for key, value in iter_settings(defaults)} for key, value in iter_settings(optionals): settings[key] = value return types.MappingProxyType(settings)
[ "def", "immutable_settings", "(", "defaults", ":", "Settings", ",", "*", "*", "optionals", ":", "Any", ")", "->", "types", ".", "MappingProxyType", ":", "settings", "=", "{", "key", ":", "value", "for", "key", ",", "value", "in", "iter_settings", "(", "d...
r"""Initialize and return immutable Settings dictionary. Settings dictionary allows you to setup settings values from multiple sources and make sure that values cannot be changed, updated by anyone else after initialization. This helps keep things clear and not worry about hidden settings change somewhere around your web application. :param defaults: Read settings values from module or dict-like instance. :param \*\*optionals: Update base settings with optional values. In common additional values shouldn't be passed, if settings values already populated from local settings or environment. But in case of using application factories this makes sense:: from . import settings def create_app(**options): app = ... app.settings = immutable_settings(settings, **options) return app And yes each additional key overwrite default setting value.
[ "r", "Initialize", "and", "return", "immutable", "Settings", "dictionary", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/settings.py#L37-L67
playpauseandstop/rororo
rororo/settings.py
inject_settings
def inject_settings(mixed: Union[str, Settings], context: MutableMapping[str, Any], fail_silently: bool = False) -> None:
    """Copy settings values into the given context.

    :param mixed:
        Settings source: a string (treated as a Python path and imported),
        a Python module, or a dict-like instance.
    :param context:
        Mapping that receives the settings; must support dict-like item
        assignment.
    :param fail_silently:
        When enabled and reading settings from a Python path, ignore errors
        if the given Python path could not be loaded.
    """
    if isinstance(mixed, str):
        try:
            mixed = import_module(mixed)
        except Exception:
            if not fail_silently:
                raise
            return
    for key, value in iter_settings(mixed):
        context[key] = value
python
def inject_settings(mixed: Union[str, Settings], context: MutableMapping[str, Any], fail_silently: bool = False) -> None: """Inject settings values to given context. :param mixed: Settings can be a string (that it will be read from Python path), Python module or dict-like instance. :param context: Context to assign settings key values. It should support dict-like item assingment. :param fail_silently: When enabled and reading settings from Python path ignore errors if given Python path couldn't be loaded. """ if isinstance(mixed, str): try: mixed = import_module(mixed) except Exception: if fail_silently: return raise for key, value in iter_settings(mixed): context[key] = value
[ "def", "inject_settings", "(", "mixed", ":", "Union", "[", "str", ",", "Settings", "]", ",", "context", ":", "MutableMapping", "[", "str", ",", "Any", "]", ",", "fail_silently", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "isinstance", "("...
Inject settings values to given context. :param mixed: Settings can be a string (that it will be read from Python path), Python module or dict-like instance. :param context: Context to assign settings key values. It should support dict-like item assignment. :param fail_silently: When enabled and reading settings from Python path ignore errors if given Python path couldn't be loaded.
[ "Inject", "settings", "values", "to", "given", "context", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/settings.py#L70-L94
playpauseandstop/rororo
rororo/settings.py
iter_settings
def iter_settings(mixed: Settings) -> Iterator[Tuple[str, Any]]:
    """Yield ``(key, value)`` settings pairs from a module or dict-like source.

    Only keys accepted by ``is_setting_key`` are produced; everything else
    is silently skipped.

    :param mixed: Settings instance to iterate.
    """
    if isinstance(mixed, types.ModuleType):
        # Module source: walk its attributes and keep the setting-like ones.
        yield from (
            (attr, getattr(mixed, attr))
            for attr in dir(mixed)
            if is_setting_key(attr)
        )
    else:
        # Dict-like source: filter its items by key.
        for key, value in mixed.items():
            if is_setting_key(key):
                yield (key, value)
python
def iter_settings(mixed: Settings) -> Iterator[Tuple[str, Any]]: """Iterate over settings values from settings module or dict-like instance. :param mixed: Settings instance to iterate. """ if isinstance(mixed, types.ModuleType): for attr in dir(mixed): if not is_setting_key(attr): continue yield (attr, getattr(mixed, attr)) else: yield from filter(lambda item: is_setting_key(item[0]), mixed.items())
[ "def", "iter_settings", "(", "mixed", ":", "Settings", ")", "->", "Iterator", "[", "Tuple", "[", "str", ",", "Any", "]", "]", ":", "if", "isinstance", "(", "mixed", ",", "types", ".", "ModuleType", ")", ":", "for", "attr", "in", "dir", "(", "mixed", ...
Iterate over settings values from settings module or dict-like instance. :param mixed: Settings instance to iterate.
[ "Iterate", "over", "settings", "values", "from", "settings", "module", "or", "dict", "-", "like", "instance", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/settings.py#L124-L135
playpauseandstop/rororo
rororo/settings.py
setup_locale
def setup_locale(lc_all: str, first_weekday: int = None, *, lc_collate: str = None, lc_ctype: str = None, lc_messages: str = None, lc_monetary: str = None, lc_numeric: str = None, lc_time: str = None) -> str:
    """Shortcut helper to setup locale for backend application.

    :param lc_all: Locale to use.
    :param first_weekday: Weekday for start week. 0 for Monday, 6 for Sunday.
        By default: None
    :param lc_collate: Collate locale to use. By default: ``<lc_all>``
    :param lc_ctype: Ctype locale to use. By default: ``<lc_all>``
    :param lc_messages: Messages locale to use. By default: ``<lc_all>``
    :param lc_monetary: Monetary locale to use. By default: ``<lc_all>``
    :param lc_numeric: Numeric locale to use. By default: ``<lc_all>``
    :param lc_time: Time locale to use. By default: ``<lc_all>``
    :return: Result of setting the ``LC_ALL`` locale.
    """
    if first_weekday is not None:
        calendar.setfirstweekday(first_weekday)

    # BUG FIX: ``setlocale(LC_ALL, ...)`` resets *every* category, so calling
    # it last (as the previous implementation did) silently discarded all the
    # per-category overrides. Set LC_ALL first, then apply the overrides.
    result = locale.setlocale(locale.LC_ALL, lc_all)

    for category, value in (
        (locale.LC_COLLATE, lc_collate),
        (locale.LC_CTYPE, lc_ctype),
        (locale.LC_MESSAGES, lc_messages),
        (locale.LC_MONETARY, lc_monetary),
        (locale.LC_NUMERIC, lc_numeric),
        (locale.LC_TIME, lc_time),
    ):
        locale.setlocale(category, value or lc_all)

    return result
python
def setup_locale(lc_all: str, first_weekday: int = None, *, lc_collate: str = None, lc_ctype: str = None, lc_messages: str = None, lc_monetary: str = None, lc_numeric: str = None, lc_time: str = None) -> str: """Shortcut helper to setup locale for backend application. :param lc_all: Locale to use. :param first_weekday: Weekday for start week. 0 for Monday, 6 for Sunday. By default: None :param lc_collate: Collate locale to use. By default: ``<lc_all>`` :param lc_ctype: Ctype locale to use. By default: ``<lc_all>`` :param lc_messages: Messages locale to use. By default: ``<lc_all>`` :param lc_monetary: Monetary locale to use. By default: ``<lc_all>`` :param lc_numeric: Numeric locale to use. By default: ``<lc_all>`` :param lc_time: Time locale to use. By default: ``<lc_all>`` """ if first_weekday is not None: calendar.setfirstweekday(first_weekday) locale.setlocale(locale.LC_COLLATE, lc_collate or lc_all) locale.setlocale(locale.LC_CTYPE, lc_ctype or lc_all) locale.setlocale(locale.LC_MESSAGES, lc_messages or lc_all) locale.setlocale(locale.LC_MONETARY, lc_monetary or lc_all) locale.setlocale(locale.LC_NUMERIC, lc_numeric or lc_all) locale.setlocale(locale.LC_TIME, lc_time or lc_all) return locale.setlocale(locale.LC_ALL, lc_all)
[ "def", "setup_locale", "(", "lc_all", ":", "str", ",", "first_weekday", ":", "int", "=", "None", ",", "*", ",", "lc_collate", ":", "str", "=", "None", ",", "lc_ctype", ":", "str", "=", "None", ",", "lc_messages", ":", "str", "=", "None", ",", "lc_mon...
Shortcut helper to setup locale for backend application. :param lc_all: Locale to use. :param first_weekday: Weekday for start week. 0 for Monday, 6 for Sunday. By default: None :param lc_collate: Collate locale to use. By default: ``<lc_all>`` :param lc_ctype: Ctype locale to use. By default: ``<lc_all>`` :param lc_messages: Messages locale to use. By default: ``<lc_all>`` :param lc_monetary: Monetary locale to use. By default: ``<lc_all>`` :param lc_numeric: Numeric locale to use. By default: ``<lc_all>`` :param lc_time: Time locale to use. By default: ``<lc_all>``
[ "Shortcut", "helper", "to", "setup", "locale", "for", "backend", "application", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/settings.py#L138-L169
playpauseandstop/rororo
rororo/settings.py
setup_timezone
def setup_timezone(timezone: str) -> None:
    """Shortcut helper to configure timezone for backend application.

    :param timezone: Timezone to use, e.g. "UTC", "Europe/Kiev".
    """
    # Nothing to do without a timezone, or on platforms lacking time.tzset
    # (e.g. Windows).
    if not timezone or not hasattr(time, 'tzset'):
        return
    tz_root = '/usr/share/zoneinfo'
    tz_filename = os.path.join(tz_root, *timezone.split('/'))
    # Only validate against the zoneinfo database when it is present.
    if os.path.exists(tz_root) and not os.path.exists(tz_filename):
        raise ValueError('Incorrect timezone value: {0}'.format(timezone))
    os.environ['TZ'] = timezone
    time.tzset()
python
def setup_timezone(timezone: str) -> None: """Shortcut helper to configure timezone for backend application. :param timezone: Timezone to use, e.g. "UTC", "Europe/Kiev". """ if timezone and hasattr(time, 'tzset'): tz_root = '/usr/share/zoneinfo' tz_filename = os.path.join(tz_root, *(timezone.split('/'))) if os.path.exists(tz_root) and not os.path.exists(tz_filename): raise ValueError('Incorrect timezone value: {0}'.format(timezone)) os.environ['TZ'] = timezone time.tzset()
[ "def", "setup_timezone", "(", "timezone", ":", "str", ")", "->", "None", ":", "if", "timezone", "and", "hasattr", "(", "time", ",", "'tzset'", ")", ":", "tz_root", "=", "'/usr/share/zoneinfo'", "tz_filename", "=", "os", ".", "path", ".", "join", "(", "tz...
Shortcut helper to configure timezone for backend application. :param timezone: Timezone to use, e.g. "UTC", "Europe/Kiev".
[ "Shortcut", "helper", "to", "configure", "timezone", "for", "backend", "application", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/settings.py#L172-L185
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
inputs
def inputs(header):
    """Collect exposure numbers of detrend input images from *header*.

    HISTORY cards are scanned for detrend input lines of the form::

        HISTORY imcombred: file_id

    A ``file_id`` is only accepted once ``adInfo`` confirms it is stored in
    the CADC archive. When no HISTORY line matches, fall back to the FLIPS
    2.0 ``IMCMB_NI`` / ``IMCMB_nn`` keywords.

    :param header: pyfits-style header exposing ``ascardlist()`` and ``get()``.
    :return: list of six-digit exposure-number strings (possibly empty).
    """
    import os
    import re

    def _archived_expnum(file_id):
        # Accept the id only when the CADC archive knows about it.
        if os.system("adInfo -a CFHT -s " + file_id) != 0:
            return None
        match = re.search(r'(\d{6}).*', file_id)
        return match.group(1) if match else None

    found = []
    for card in header.ascardlist():
        if card.key != "HISTORY":
            continue
        match = re.search(r'imcombred: (\d{6}[bfopd])\d{2} .*', card.value)
        if not match:
            continue
        expnum = _archived_expnum(match.group(1))
        if expnum:
            found.append(expnum)

    if not found:
        ### try using the new FLIPS 2.0 keywords
        for nin in range(header.get('IMCMB_NI', 0)):
            value = header.get('IMCMB_' + str(nin).zfill(2), '')
            # BUG FIX: the original searched "g" (the value of the last
            # HISTORY card, possibly undefined) here instead of the
            # IMCMB_nn keyword value it had just fetched.
            match = re.search(r'.*(\d{6}[bfopd]).*', value)
            if not match:
                continue
            expnum = _archived_expnum(match.group(1))
            if expnum:
                found.append(expnum)

    return found
python
def inputs(header): """Read through the HISTORY cards in an image header looking for detrend input lines. Detrend inputs are given on lines like: HISTORY imcombred: file_id We require that the value in file_id be store in the CADC archive before adding to the inputs list. """ import string, re inputs=[] for h in header.ascardlist(): if h.key=="HISTORY": g=h.value result=re.search('imcombred: (\d{6}[bfopd])\d{2} .*',g) if not result: continue file_id=result.group(1) import os status=os.system("adInfo -a CFHT -s "+file_id) if status==0: result=re.search('(\d{6}).*',file_id) if not result: continue expnum=result.group(1) inputs.append(expnum) if len(inputs)==0: ### try using the new FLIPS 2.0 keywords nit = header.get('IMCMB_NI',0) if nit==0: return(inputs) for nin in range(nit): kwd='IMCMB_'+str(nin).zfill(2) file=(header.get(kwd,'')) result=re.search('.*(\d{6}[bfopd]).*',g) if not result: continue file_id=result.group(1) import os status=os.system("adInfo -a CFHT -s "+file_id) if status==0: result=re.search('(\d{6}).*',file_id) if not result: continue expnum=result.group(1) inputs.append(expnum) return inputs
[ "def", "inputs", "(", "header", ")", ":", "import", "string", ",", "re", "inputs", "=", "[", "]", "for", "h", "in", "header", ".", "ascardlist", "(", ")", ":", "if", "h", ".", "key", "==", "\"HISTORY\"", ":", "g", "=", "h", ".", "value", "result"...
Read through the HISTORY cards in an image header looking for detrend input lines. Detrend inputs are given on lines like: HISTORY imcombred: file_id We require that the value in file_id be store in the CADC archive before adding to the inputs list.
[ "Read", "through", "the", "HISTORY", "cards", "in", "an", "image", "header", "looking", "for", "detrend", "input", "lines", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L138-L188
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
elixir_decode
def elixir_decode(elixir_filename):
    """Decode an elixir style file name into a pyfits Header.

    Elixir filenames have the format::

        RUNID.TYPE.FILTER/EXPTIME.CHIPID.VERSION.fits

    :param elixir_filename: elixir style file name to decode.
    :return: ``pyfits.Header`` populated with the decoded keyword values.
    :raises ValueError: when the name does not split into at least 5 parts.
    """
    import re
    import pyfits

    parts = re.findall(r'([^\.\s]+)', elixir_filename)
    ### check that this was a valid elixir_filename
    if not parts or len(parts) < 5:
        raise ValueError('String %s does not parse as elixir filename' % elixir_filename)

    comments = {
        'exptime': 'Integration time (seconds)',
        'filter': 'Name of filter in position ',
        'crunid': 'CFHT Q RunID',
        'obstype': 'Observation or Exposure type',
        'imageid': 'CCD chip number',
        'filename': 'file name at creation of this MEF file',
    }

    keywords = {
        'filename': elixir_filename,
        'runid': parts[0],
        'obstype': parts[1],
        'exptime': None,
        'filter': None,
        'imageid': parts[3],
        'version': parts[4],
    }
    ### if the third part of the name is all numbers we assume exposure time
    # BUG FIX: the original assigned into a non-existent "keyword" dict here,
    # which raised NameError for every input.
    if re.match(r'\d+', parts[2]):
        keywords['exptime'] = int(parts[2])
    else:
        keywords['filter'] = parts[2]

    header = pyfits.Header()
    for keyword, value in keywords.items():
        if value:
            # BUG FIX: the original read from an undefined "comment" dict,
            # and some keywords (runid, version) have no canned comment text,
            # so use a safe lookup with an empty-string fallback.
            header.update(keyword, value, comment=comments.get(keyword, ''))
    return header
python
def elixir_decode(elixir_filename): """ Takes an elixir style file name and decodes it's content. Values returned as a dictionary. Elixir filenames have the format RUNID.TYPE.FILTER/EXPTIME.CHIPID.VERSION.fits """ import re, pyfits parts_RE=re.compile(r'([^\.\s]+)') dataset_name = parts_RE.findall(elixir_filename) ### check that this was a valid elixir_filename if not dataset_name or len(dataset_name)<5 : raise ValueError('String %s does not parse as elixir filename' % elixir_filename ) comments={'exptime': 'Integration time (seconds)', 'filter': 'Name of filter in position ', 'crunid': 'CFHT Q RunID', 'obstype': 'Observation or Exposure type', 'imageid': 'CCD chip number', 'filename': 'file name at creation of this MEF file' } keywords={} keywords['filename']=elixir_filename keywords['runid']=dataset_name[0] keywords['obstype']=dataset_name[1] keywords['exptime']=None keywords['filter']=None ### if the third part of the name is all numbers we assume exposure time if re.match(r'\d+',dataset_name[2]): keyword['exptime']=int(dataset_name[2]) else: keyword['filter']=dataset_name[2] keywords['imageid']=dataset_name[3] keywords['version']=dataset_name[4] header=pyfits.Header() for keyword in keywords.keys(): if keywords[keyword]: header.update(keyword,keywords[keyword],comment=comment[keyword]) return header
[ "def", "elixir_decode", "(", "elixir_filename", ")", ":", "import", "re", ",", "pyfits", "parts_RE", "=", "re", ".", "compile", "(", "r'([^\\.\\s]+)'", ")", "dataset_name", "=", "parts_RE", ".", "findall", "(", "elixir_filename", ")", "### check that this was a va...
Takes an elixir style file name and decodes its content. Values returned as a dictionary. Elixir filenames have the format RUNID.TYPE.FILTER/EXPTIME.CHIPID.VERSION.fits
[ "Takes", "an", "elixir", "style", "file", "name", "and", "decodes", "it", "s", "content", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L211-L257
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
create_mef
def create_mef(filename=None):
    """Create an empty MEF (multi-extension FITS) file and return its name.

    When *filename* is None a random temporary name is generated; otherwise
    the given name is stripped of surrounding whitespace and a ``.fits``
    suffix is appended if missing. The file is written with a bare
    PrimaryHDU whose header is marked extendable (EXTEND/NEXTEND) and
    carries CADC provenance comments.

    :param filename: desired output file name, or None for a random one.
    :return: the name of the FITS file that was created.
    """
    import pyfits, time
    if not filename:
        ### here I know what filename is to start with.
        import tempfile
        # NOTE(review): tempfile.mktemp is race-prone — the name can be
        # claimed by another process before writeto opens it; mkstemp or
        # NamedTemporaryFile would be safer.
        filename=tempfile.mktemp(suffix='.fits')
    else:
        import string, re
        ### filenames gotta be a string and no lead/trailing space
        # NOTE(review): string.strip() is Python 2 only;
        # str(filename).strip() works on both Python 2 and 3.
        filename=string.strip(str(filename))
        ### require that the filename ends in .fits
        suffix=re.match(r'^.*.fits$',filename)
        if not suffix:
            filename = filename+'.fits'
    ### create an HDU list
    temp = pyfits.HDUList()
    ### create a primary HDU
    prihdu = pyfits.PrimaryHDU()
    ### build the header
    h=prihdu.header
    # EXTEND must follow NAXIS per the FITS standard; NEXTEND starts at 0
    # and is presumably incremented as extensions are appended elsewhere.
    h.update('EXTEND',pyfits.TRUE,after='NAXIS')
    h.update('NEXTEND',0,after='EXTEND')
    h.add_comment('MEF created at CADC')
    # __Version__ is a module-level constant defined outside this view.
    h.add_comment('Created using '+__name__+' '+__Version__)
    h.add_comment('Extensions may not be in CCD order')
    #h.update('cfh12k',__Version__,comment='split2mef software at CADC')
    h.add_comment('Use the EXTNAME keyword')
    h.add_history('Primary HDU created on '+time.asctime())
    ### stick the HDU onto the HDU list and write to file
    temp.append(prihdu)
    temp.writeto(filename)
    temp.close()
    return(filename)
python
def create_mef(filename=None): """ Create a file an MEF fits file called filename. Generate a random filename if None given """ import pyfits, time if not filename: ### here I know what filename is to start with. import tempfile filename=tempfile.mktemp(suffix='.fits') else: import string, re ### filenames gotta be a string and no lead/trailing space filename=string.strip(str(filename)) ### require that the filename ends in .fits suffix=re.match(r'^.*.fits$',filename) if not suffix: filename = filename+'.fits' ### create an HDU list temp = pyfits.HDUList() ### create a primary HDU prihdu = pyfits.PrimaryHDU() ### build the header h=prihdu.header h.update('EXTEND',pyfits.TRUE,after='NAXIS') h.update('NEXTEND',0,after='EXTEND') h.add_comment('MEF created at CADC') h.add_comment('Created using '+__name__+' '+__Version__) h.add_comment('Extensions may not be in CCD order') #h.update('cfh12k',__Version__,comment='split2mef software at CADC') h.add_comment('Use the EXTNAME keyword') h.add_history('Primary HDU created on '+time.asctime()) ### stick the HDU onto the HDU list and write to file temp.append(prihdu) temp.writeto(filename) temp.close() return(filename)
[ "def", "create_mef", "(", "filename", "=", "None", ")", ":", "import", "pyfits", ",", "time", "if", "not", "filename", ":", "### here I know what filename is to start with.", "import", "tempfile", "filename", "=", "tempfile", ".", "mktemp", "(", "suffix", "=", "...
Create an MEF fits file called filename. Generate a random filename if None given
[ "Create", "a", "file", "an", "MEF", "fits", "file", "called", "filename", ".", "Generate", "a", "random", "filename", "if", "None", "given" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L277-L319
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
strip_pad
def strip_pad(hdu):
    """Remove the padding lines that CFHT adds to headers"""
    cards = hdu.header.ascardlist()
    # Collect the indices of padding cards first, then delete from the end
    # so earlier indices stay valid while we remove entries.
    padded = [
        idx
        for idx in range(len(cards))
        if cards[idx].key in __comment_keys and str(cards[idx]) == __cfht_padding
    ]
    for idx in reversed(padded):
        del cards[idx]
    return 0
python
def strip_pad(hdu): """Remove the padding lines that CFHT adds to headers""" l = hdu.header.ascardlist() d = [] for index in range(len(l)): if l[index].key in __comment_keys and str(l[index])==__cfht_padding: d.append(index) d.reverse() for index in d: del l[index] return(0)
[ "def", "strip_pad", "(", "hdu", ")", ":", "l", "=", "hdu", ".", "header", ".", "ascardlist", "(", ")", "d", "=", "[", "]", "for", "index", "in", "range", "(", "len", "(", "l", ")", ")", ":", "if", "l", "[", "index", "]", ".", "key", "in", "...
Remove the padding lines that CFHT adds to headers
[ "Remove", "the", "padding", "lines", "that", "CFHT", "adds", "to", "headers" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L343-L354
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
stack
def stack(outfile,infiles,verbose=0):
    """Append the single-extension FITS files *infiles* to the MEF *outfile*.

    Creates *outfile* (via create_mef) when it does not already exist, copies
    every HDU of every input file in as an ImageHDU, and records the bounding
    box of all DETSEC keywords as the mosaic DETSIZE in the primary header.

    :param outfile: name of the MEF file to create or append to.
    :param infiles: sequence of FITS file names to append.
    :param verbose: when true, print progress messages.
    :return: 0 on success.
    :raises IOError: when an input file cannot be opened via _open.
    """
    import os, sys, string, tempfile, shutil
    import pyfits, re, time
    ### if there is a pre-existing MEF file for output then append to it
    ### otherwise we need to create a PrimaryHDU
    if os.access(outfile,os.R_OK)!=1:
        if verbose:
            print "Creating new MEF file: ",outfile
        outfile=create_mef(outfile)
    ### get a handle for the output image, _open is the local variant of
    ### pyfits.open and just does some error recovery if pyfits.open raises an
    ### exception.
    out = pyfits.open(outfile,'append')
    hdr = out[0].header
    count=0
    ### append the fits files given on the command line to the
    ### output file.
    # Running bounding box of every DETSEC seen, in detector pixels.
    det_xmin=None
    det_xmax=None
    det_ymin=None
    det_ymax=None
    for infile in infiles:
        if verbose:
            print "Adding ",infile," to ",outfile
        ### _open tries to handle bad fits format exceptions.
        file=_open(infile)
        if not file:
            raise IOError("Cann't get the HDU for "+infile)
        for hdu in file:
            extname=None
            if hdu.header.has_key('EXTNAME') :
                extname=hdu.header['EXTNAME']
            elif hdu.header.has_key('EXTVER') :
                # NOTE(review): this zero-pads has_key's boolean result, not
                # the EXTVER value itself — looks like it should read
                # hdu.header['EXTVER']; confirm before relying on extname.
                extname="ccd"+string.zfill(hdu.header.has_key('EXTVER'),2)
            if hdu.header.has_key('EPOCH'):
                if hdu.header.has_key('EQUINOX'):
                    # Both present: EQUINOX wins, drop the deprecated EPOCH.
                    del hdu.header['EPOCH']
                else:
                    # NOTE(review): this branch runs only when EQUINOX is
                    # absent, yet reads hdu.header['EQUINOX'] — presumably
                    # EPOCH was intended; verify against pyfits behavior.
                    hdu.header.update('EQUINOX',hdu.header['EQUINOX'].value, comment=hdu.header['EQUINOX'].comment)
            ahdu=pyfits.ImageHDU(data=hdu.data, header=hdu.header, name=extname)
            out.append(ahdu)
            ### build the size of the overall detector
            if hdu.header.has_key('DETSEC'):
                values=re.findall(r'(\d+)', hdu.header['DETSEC'])
                if len(values)==4:
                    xmin=int(values[0])
                    xmax=int(values[1])
                    ymin=int(values[2])
                    ymax=int(values[3])
                    # Normalize each axis range so min <= max.
                    if xmin>xmax:
                        t=xmin
                        xmin=xmax
                        xmax=t
                    if ymin>ymax:
                        t=ymin
                        ymin=ymax
                        ymax=t
                    # Grow the running mosaic bounding box; the "or not"
                    # clauses seed the first value (None is falsy).
                    if xmin<det_xmin or not det_xmin:
                        det_xmin=xmin
                    if xmax>det_xmax or not det_xmax:
                        det_xmax=xmax
                    if ymin<det_ymin or not det_ymin:
                        det_ymin=ymin
                    if ymax>det_ymax or not det_ymax:
                        det_ymax=ymax
        file.close()
    detsize='['+str(det_xmin)+':'+str(det_xmax)+','+str(det_ymin)+':'+str(det_ymax)+']'
    out[0].header.update('DETSIZE',detsize,comment='Size of Mosaic')
    out.close()
    if verbose:
        print "Done building MEF: ",outfile
    return 0
python
def stack(outfile,infiles,verbose=0): """ Stick infiles into outfiles as FITS extensions. outfile willl contain an MEF format file of the single extension FITS files named in the infiles array """ import os, sys, string, tempfile, shutil import pyfits, re, time ### if there is a pre-existing MEF file for output then append to it ### otherwise we need to create a PrimaryHDU if os.access(outfile,os.R_OK)!=1: if verbose: print "Creating new MEF file: ",outfile outfile=create_mef(outfile) ### get a handle for the output image, _open is the local variant of ### pyfits.open and just does some error recovery if pyfits.open raises an ### exception. out = pyfits.open(outfile,'append') hdr = out[0].header count=0 ### append the fits files given on the command line to the ### output file. det_xmin=None det_xmax=None det_ymin=None det_ymax=None for infile in infiles: if verbose: print "Adding ",infile," to ",outfile ### _open tries to handle bad fits format exceptions. file=_open(infile) if not file: raise IOError("Cann't get the HDU for "+infile) for hdu in file: extname=None if hdu.header.has_key('EXTNAME') : extname=hdu.header['EXTNAME'] elif hdu.header.has_key('EXTVER') : extname="ccd"+string.zfill(hdu.header.has_key('EXTVER'),2) if hdu.header.has_key('EPOCH'): if hdu.header.has_key('EQUINOX'): del hdu.header['EPOCH'] else: hdu.header.update('EQUINOX',hdu.header['EQUINOX'].value, comment=hdu.header['EQUINOX'].comment) ahdu=pyfits.ImageHDU(data=hdu.data, header=hdu.header, name=extname) out.append(ahdu) ### build the size of the overall detector if hdu.header.has_key('DETSEC'): values=re.findall(r'(\d+)', hdu.header['DETSEC']) if len(values)==4: xmin=int(values[0]) xmax=int(values[1]) ymin=int(values[2]) ymax=int(values[3]) if xmin>xmax: t=xmin xmin=xmax xmax=t if ymin>ymax: t=ymin ymin=ymax ymax=t if xmin<det_xmin or not det_xmin: det_xmin=xmin if xmax>det_xmax or not det_xmax: det_xmax=xmax if ymin<det_ymin or not det_ymin: det_ymin=ymin if ymax>det_ymax or not det_ymax: 
det_ymax=ymax file.close() detsize='['+str(det_xmin)+':'+str(det_xmax)+','+str(det_ymin)+':'+str(det_ymax)+']' out[0].header.update('DETSIZE',detsize,comment='Size of Mosaic') out.close() if verbose: print "Done building MEF: ",outfile return 0
[ "def", "stack", "(", "outfile", ",", "infiles", ",", "verbose", "=", "0", ")", ":", "import", "os", ",", "sys", ",", "string", ",", "tempfile", ",", "shutil", "import", "pyfits", ",", "re", ",", "time", "### if there is a pre-existing MEF file for output then ...
Stick infiles into outfiles as FITS extensions. outfile will contain an MEF format file of the single extension FITS files named in the infiles array
[ "Stick", "infiles", "into", "outfiles", "as", "FITS", "extensions", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L380-L474
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
adGet
def adGet(file_id, archive="CFHT", extno=None, cutout=None ):
    """Fetch a FITS image for *file_id* from the CADC, returning its filename.

    When the target file already exists locally it is reused. When the base
    file is present under ``$MOP_DATA_PATH`` the requested cutout is
    extracted with ``imcopy``; otherwise the image is downloaded through the
    CADC getData proxy with ``curl`` and gunzipped.

    :param file_id: CADC file identifier; None returns -1 immediately.
    :param archive: archive name to request from. By default: "CFHT".
    :param extno: zero-based extension number; selects cutout "[extno+1]".
    :param cutout: extra cutout specification appended after the extension.
    :return: name of the local FITS file written, or -1 when file_id is None.
    :raises TaskError: when the external command fails.
    """
    import os, string, re,urllib
    #proxy="http://www1.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData"
    # NOTE(review): points at the *test* proxy host — presumably temporary;
    # confirm before release.
    proxy="http://test.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData"
    if file_id is None:
        return(-1)
    # Local output name: optionally suffixed with the zero-padded extension.
    if extno is None:
        filename=file_id+".fits"
    else:
        filename="%s%s.fits" % (file_id, string.zfill(extno,2))
    print filename
    # Reuse a previously downloaded copy when present.
    if os.access(filename,os.R_OK):
        return filename
    args={ "file_id": file_id, "archive": archive }
    # getData cutout syntax appears to be 1-based, hence extno+1.
    if extno is not None:
        args['cutout']="["+str(extno+1)+"]"
    else:
        args['cutout']=''
    if cutout is not None:
        args['cutout']=args['cutout']+cutout
    argline=""
    sep=""
    import sys
    ### get the directory that may contain the data
    mop_data_path=os.curdir
    if os.environ.has_key('MOP_DATA_PATH'):
        mop_data_path=os.environ['MOP_DATA_PATH']
    suffix="fits"
    basefile=mop_data_path+"/"+file_id+".fits"
    print basefile
    if not os.access(basefile,os.R_OK):
        # No local base file: build the query string and download via curl.
        argdict={}
        argline=''
        sep=''
        for arg in args:
            if not args[arg]:
                continue
            argline+=sep+"%s=%s" % ( arg, args[arg])
            sep='&'
        url=proxy+"?"+argline
        # NOTE(review): credentials are hard-coded into the shell command and
        # the URL is interpolated unquoted into a shell string — security
        # risk; move auth to netrc/config and avoid shell interpolation.
        command="curl --silent -g --fail --max-time 1800 --user jkavelaars:newone '"+url+"' | gunzip > "+filename
    else:
        # Base file exists locally: extract the requested cutout with imcopy.
        command="imcopy %s%s %s" % ( basefile,args['cutout'],filename)
    print command
    try:
        status=os.system(command)
    except:
        sys.stderr.write("Failed to execute command: %s\n" % ( command))
        raise TaskError, "getData failed"
    if status!=0:
        sys.stderr.write("Failed while executing command: %s\n" % ( command))
        raise TaskError, "getData failed"
    return filename
python
def adGet(file_id, archive="CFHT", extno=None, cutout=None ): """Use get a fits image from the CADC.""" import os, string, re,urllib #proxy="http://www1.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData" proxy="http://test.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData" if file_id is None: return(-1) if extno is None: filename=file_id+".fits" else: filename="%s%s.fits" % (file_id, string.zfill(extno,2)) print filename if os.access(filename,os.R_OK): return filename args={ "file_id": file_id, "archive": archive } if extno is not None: args['cutout']="["+str(extno+1)+"]" else: args['cutout']='' if cutout is not None: args['cutout']=args['cutout']+cutout argline="" sep="" import sys ### get the directory that may contain the data mop_data_path=os.curdir if os.environ.has_key('MOP_DATA_PATH'): mop_data_path=os.environ['MOP_DATA_PATH'] suffix="fits" basefile=mop_data_path+"/"+file_id+".fits" print basefile if not os.access(basefile,os.R_OK): argdict={} argline='' sep='' for arg in args: if not args[arg]: continue argline+=sep+"%s=%s" % ( arg, args[arg]) sep='&' url=proxy+"?"+argline command="curl --silent -g --fail --max-time 1800 --user jkavelaars:newone '"+url+"' | gunzip > "+filename else: command="imcopy %s%s %s" % ( basefile,args['cutout'],filename) print command try: status=os.system(command) except: sys.stderr.write("Failed to execute command: %s\n" % ( command)) raise TaskError, "getData failed" if status!=0: sys.stderr.write("Failed while executing command: %s\n" % ( command)) raise TaskError, "getData failed" return filename
[ "def", "adGet", "(", "file_id", ",", "archive", "=", "\"CFHT\"", ",", "extno", "=", "None", ",", "cutout", "=", "None", ")", ":", "import", "os", ",", "string", ",", "re", ",", "urllib", "#proxy=\"http://www1.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/authProxy/getData\"", ...
Use get a fits image from the CADC.
[ "Use", "get", "a", "fits", "image", "from", "the", "CADC", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L495-L568
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
_open
def _open(file,mode='copyonwrite'): """Opens a FITS format file and calls _open_fix if header doesn't verify correctly. """ import pyfits try: infits=pyfits.open(file,mode) hdu=infits except (ValueError,pyfits.VerifyError,pyfits.FITS_SevereError): import sys #### I really only know how to deal with one error right now. #if str(sys.exc_info()[1])=='mandatory keywords are not fixed format': hdu=_open_fix(file) #else: # print sys.exc_info()[1] # print " Failed trying to repair ", file # raise for f in hdu: strip_pad(f) return hdu
python
def _open(file,mode='copyonwrite'): """Opens a FITS format file and calls _open_fix if header doesn't verify correctly. """ import pyfits try: infits=pyfits.open(file,mode) hdu=infits except (ValueError,pyfits.VerifyError,pyfits.FITS_SevereError): import sys #### I really only know how to deal with one error right now. #if str(sys.exc_info()[1])=='mandatory keywords are not fixed format': hdu=_open_fix(file) #else: # print sys.exc_info()[1] # print " Failed trying to repair ", file # raise for f in hdu: strip_pad(f) return hdu
[ "def", "_open", "(", "file", ",", "mode", "=", "'copyonwrite'", ")", ":", "import", "pyfits", "try", ":", "infits", "=", "pyfits", ".", "open", "(", "file", ",", "mode", ")", "hdu", "=", "infits", "except", "(", "ValueError", ",", "pyfits", ".", "Ver...
Opens a FITS format file and calls _open_fix if header doesn't verify correctly.
[ "Opens", "a", "FITS", "format", "file", "and", "calls", "_open_fix", "if", "header", "doesn", "t", "verify", "correctly", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L590-L611
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
find_proc_date
def find_proc_date(header): """Search the HISTORY fields of a header looking for the FLIPS processing date. """ import string, re for h in header.ascardlist(): if h.key=="HISTORY": g=h.value if ( string.find(g,'FLIPS 1.0 -:') ): result=re.search('imred: FLIPS 1.0 - \S{3} (.*) - ([\s\d]\d:\d\d:\d\d)\s*$',g) if result: date=result.group(1) time=result.group(2) datetime=date+" "+time return datetime return None
python
def find_proc_date(header): """Search the HISTORY fields of a header looking for the FLIPS processing date. """ import string, re for h in header.ascardlist(): if h.key=="HISTORY": g=h.value if ( string.find(g,'FLIPS 1.0 -:') ): result=re.search('imred: FLIPS 1.0 - \S{3} (.*) - ([\s\d]\d:\d\d:\d\d)\s*$',g) if result: date=result.group(1) time=result.group(2) datetime=date+" "+time return datetime return None
[ "def", "find_proc_date", "(", "header", ")", ":", "import", "string", ",", "re", "for", "h", "in", "header", ".", "ascardlist", "(", ")", ":", "if", "h", ".", "key", "==", "\"HISTORY\"", ":", "g", "=", "h", ".", "value", "if", "(", "string", ".", ...
Search the HISTORY fields of a header looking for the FLIPS processing date.
[ "Search", "the", "HISTORY", "fields", "of", "a", "header", "looking", "for", "the", "FLIPS", "processing", "date", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L631-L646
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
find_detrend_keyword
def find_detrend_keyword(header, type): """Search through header and find the elixir formated string(s) that match the the input 'type'. header is a FITS HDU. Elixir formated strings are crunid.type.filter/exptime.chipid.version. """ import re, string value='NULL' #print type for h in header: g = str(h) if ( string.find(g,'.'+type+'.')!= -1 ): result=re.search('[^\s]*\.'+type+'\.[^\s]*\.\d\d\.\d\d',g) if result: return result.group(0)
python
def find_detrend_keyword(header, type): """Search through header and find the elixir formated string(s) that match the the input 'type'. header is a FITS HDU. Elixir formated strings are crunid.type.filter/exptime.chipid.version. """ import re, string value='NULL' #print type for h in header: g = str(h) if ( string.find(g,'.'+type+'.')!= -1 ): result=re.search('[^\s]*\.'+type+'\.[^\s]*\.\d\d\.\d\d',g) if result: return result.group(0)
[ "def", "find_detrend_keyword", "(", "header", ",", "type", ")", ":", "import", "re", ",", "string", "value", "=", "'NULL'", "#print type", "for", "h", "in", "header", ":", "g", "=", "str", "(", "h", ")", "if", "(", "string", ".", "find", "(", "g", ...
Search through header and find the elixir formated string(s) that match the the input 'type'. header is a FITS HDU. Elixir formated strings are crunid.type.filter/exptime.chipid.version.
[ "Search", "through", "header", "and", "find", "the", "elixir", "formated", "string", "(", "s", ")", "that", "match", "the", "the", "input", "type", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L666-L682
OSSOS/MOP
src/jjk/preproc/MOPfits_old.py
_open_fix
def _open_fix(file): """Takes in a fits file name, open the file in binary mode and creates an HDU. Will attempt to fix some of the header keywords to match the standard FITS format. """ import pyfits, re, string temp = pyfits.HDUList() hdu = pyfits.PrimaryHDU() hdu._file=open(file,'rb') _number_RE = re.compile( r'(?P<sign>[+-])?0*(?P<digt>(\.\d+|\d+(\.\d*)?)([deDE][+-]?\d+)?)') ### here's the real difference between pyFits and cfh12kFits. ### I'm more flexible on the format of the header file so that allows me ### read more files. card_RE=re.compile(r""" (?P<KEY>[-A-Z0-9_a-za ]{8}) ### keyword is the first 8 bytes... i'll allow small letters ( ( (?P<VALUE>=\s) ### =\s indicats a value coming. (\s* ( (?P<STRING>\'[^\']*[\'/]) ### a string | (?P<FLOAT>([+-]?(\.\d+|\d+\.\d*)([dDEe][+-]?\d+)?)) ### a floating point number | (?P<INT>[+-]?\d+) ### an integer | (?P<BOOL>[TFtf]) ### perhaps value is boolian ) \s* (( / )?(?P<COMMENT>.*))? ### value related comment. ) ) | (?P<C2>.*) ### strickly a comment field ) """,re.VERBOSE) done=0 while ( not done): ### read a line of 80 characters up to a new line from the file. 
block=hdu._file.readline(80) string_end=79 if len(block)== 0: done=1 continue if block[-1]=='\n': string_end=len(block)-2 line = re.match(r'[ -~]{0,'+str(string_end)+'}',block) line = string.ljust(line.group(0),80)[0:79] if line[0:8] == 'END ': done=1 break card=card_RE.match(line) if not card or not card.group('KEY'): print card.groups() raise SyntaxError("Failed to get keyword from FITS Card %s" % line) key=card.group('KEY') value=None if card.group('INT'): try: value=int(card.group('INT')) except: value=card.group('INT') elif card.group('FLOAT'): try: value=float(card.group('FLOAT')) except: value=float(card.group('FLOAT')) elif card.group('BOOL'): value=pyfits.Boolean(card.group('BOOL')) elif card.group('STRING'): value=card.group('STRING')[1:-1] if card.group('COMMENT'): _comment=card.group('COMMENT') elif card.group('C2'): _comment=card.group('C2') else: _comment=None try: if key =='COMMENT ': hdu.header.add_comment(_comment) elif key =='HISTORY ': hdu.header.add_history(_comment) elif key ==' ': hdu.header.add_blank(_comment) elif key: if key =='DATE-OBS' and value: value=string.replace(value,'/','-') hdu.header.update(key,value,comment=_comment) except: raise SyntaxError("Failed to convert line to FITS Card %s" % line) ### set some internal variables to decided on data flow. hdu._bzero=hdu.header.get('BZERO',0) hdu._bscale=hdu.header.get('BSCALE',1) hdu._bitpix=hdu.header.get('BITPIX',-16) if hdu.header.get('NAXIS',0)>0: naxis1=hdu.header.get('NAXIS1',1) naxis2=hdu.header.get('NAXIS2',1) ### now read the data... 
this is a HACK from pyfits.py import numarray as num code = pyfits._ImageBaseHDU.NumCode[hdu._bitpix] dims = tuple([naxis2,naxis1]) raw_data = num.fromfile(hdu._file,type=code,shape=dims) raw_data._byteorder='big' if ( hdu._bzero != 0 or hdu._bscale!=1 ): if hdu._bitpix > 0 : hdu.data=num.array(raw_data,type=num.Float32) else: hdu.data=raw_data if hdu._bscale != 1: num.multiply(hdu.data,hdu._bscale,hdu.data) if hdu._bzero!=0: hdu.data=hdu.data + hdu._bzero del hdu.header['BSCALE'] del hdu.header['BZERO'] hdu.header['BITPIX']=pyfits._ImageBaseHDU.ImgCode[hdu.data.type()] temp.append(hdu) return temp
python
def _open_fix(file): """Takes in a fits file name, open the file in binary mode and creates an HDU. Will attempt to fix some of the header keywords to match the standard FITS format. """ import pyfits, re, string temp = pyfits.HDUList() hdu = pyfits.PrimaryHDU() hdu._file=open(file,'rb') _number_RE = re.compile( r'(?P<sign>[+-])?0*(?P<digt>(\.\d+|\d+(\.\d*)?)([deDE][+-]?\d+)?)') ### here's the real difference between pyFits and cfh12kFits. ### I'm more flexible on the format of the header file so that allows me ### read more files. card_RE=re.compile(r""" (?P<KEY>[-A-Z0-9_a-za ]{8}) ### keyword is the first 8 bytes... i'll allow small letters ( ( (?P<VALUE>=\s) ### =\s indicats a value coming. (\s* ( (?P<STRING>\'[^\']*[\'/]) ### a string | (?P<FLOAT>([+-]?(\.\d+|\d+\.\d*)([dDEe][+-]?\d+)?)) ### a floating point number | (?P<INT>[+-]?\d+) ### an integer | (?P<BOOL>[TFtf]) ### perhaps value is boolian ) \s* (( / )?(?P<COMMENT>.*))? ### value related comment. ) ) | (?P<C2>.*) ### strickly a comment field ) """,re.VERBOSE) done=0 while ( not done): ### read a line of 80 characters up to a new line from the file. 
block=hdu._file.readline(80) string_end=79 if len(block)== 0: done=1 continue if block[-1]=='\n': string_end=len(block)-2 line = re.match(r'[ -~]{0,'+str(string_end)+'}',block) line = string.ljust(line.group(0),80)[0:79] if line[0:8] == 'END ': done=1 break card=card_RE.match(line) if not card or not card.group('KEY'): print card.groups() raise SyntaxError("Failed to get keyword from FITS Card %s" % line) key=card.group('KEY') value=None if card.group('INT'): try: value=int(card.group('INT')) except: value=card.group('INT') elif card.group('FLOAT'): try: value=float(card.group('FLOAT')) except: value=float(card.group('FLOAT')) elif card.group('BOOL'): value=pyfits.Boolean(card.group('BOOL')) elif card.group('STRING'): value=card.group('STRING')[1:-1] if card.group('COMMENT'): _comment=card.group('COMMENT') elif card.group('C2'): _comment=card.group('C2') else: _comment=None try: if key =='COMMENT ': hdu.header.add_comment(_comment) elif key =='HISTORY ': hdu.header.add_history(_comment) elif key ==' ': hdu.header.add_blank(_comment) elif key: if key =='DATE-OBS' and value: value=string.replace(value,'/','-') hdu.header.update(key,value,comment=_comment) except: raise SyntaxError("Failed to convert line to FITS Card %s" % line) ### set some internal variables to decided on data flow. hdu._bzero=hdu.header.get('BZERO',0) hdu._bscale=hdu.header.get('BSCALE',1) hdu._bitpix=hdu.header.get('BITPIX',-16) if hdu.header.get('NAXIS',0)>0: naxis1=hdu.header.get('NAXIS1',1) naxis2=hdu.header.get('NAXIS2',1) ### now read the data... 
this is a HACK from pyfits.py import numarray as num code = pyfits._ImageBaseHDU.NumCode[hdu._bitpix] dims = tuple([naxis2,naxis1]) raw_data = num.fromfile(hdu._file,type=code,shape=dims) raw_data._byteorder='big' if ( hdu._bzero != 0 or hdu._bscale!=1 ): if hdu._bitpix > 0 : hdu.data=num.array(raw_data,type=num.Float32) else: hdu.data=raw_data if hdu._bscale != 1: num.multiply(hdu.data,hdu._bscale,hdu.data) if hdu._bzero!=0: hdu.data=hdu.data + hdu._bzero del hdu.header['BSCALE'] del hdu.header['BZERO'] hdu.header['BITPIX']=pyfits._ImageBaseHDU.ImgCode[hdu.data.type()] temp.append(hdu) return temp
[ "def", "_open_fix", "(", "file", ")", ":", "import", "pyfits", ",", "re", ",", "string", "temp", "=", "pyfits", ".", "HDUList", "(", ")", "hdu", "=", "pyfits", ".", "PrimaryHDU", "(", ")", "hdu", ".", "_file", "=", "open", "(", "file", ",", "'rb'",...
Takes in a fits file name, open the file in binary mode and creates an HDU. Will attempt to fix some of the header keywords to match the standard FITS format.
[ "Takes", "in", "a", "fits", "file", "name", "open", "the", "file", "in", "binary", "mode", "and", "creates", "an", "HDU", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPfits_old.py#L704-L842
OSSOS/MOP
src/ossos/core/ossos/ssos.py
TracksParser.query_ssos
def query_ssos(self, mpc_observations, lunation_count=None): """Send a query to the SSOS web service, looking for available observations using the given track. :param mpc_observations: a list of mpc.Observations :param lunation_count: how many dark runs (+ and -) to search into :return: an SSOSData object :rtype: SSOSData """ # we observe ~ a week either side of new moon # but we don't know when in the dark run the discovery happened # so be generous with the search boundaries, add extra 2 weeks # current date just has to be the night of the triplet, if lunation_count is None: # Only using SSOS to find data acquired during the survey period, for now. search_start_date = Time('2013-02-08', scale='utc') search_end_date = Time(datetime.datetime.now().strftime('%Y-%m-%d'), scale='utc') else: search_start_date = Time((mpc_observations[0].date.jd * units.day - ( self._nights_per_darkrun + lunation_count * self._nights_separating_darkruns)), format='jd', scale='utc') search_end_date = Time((mpc_observations[-1].date.jd * units.day + ( self._nights_per_darkrun + lunation_count * self._nights_separating_darkruns)), format='jd', scale='utc') logger.info("Sending query to SSOS start_date: {} end_data: {}\n".format(search_start_date, search_end_date)) query = Query(mpc_observations, search_start_date=search_start_date, search_end_date=search_end_date) logger.debug("Parsing query results...") tracks_data = self.ssos_parser.parse(query.get(), mpc_observations=mpc_observations) tracks_data.mpc_observations = {} for mpc_observation in mpc_observations: # attach the input observations to the the SSOS query result. 
if isinstance(mpc_observation.comment, mpc.OSSOSComment): try: tracks_data.mpc_observations[mpc_observation.comment.frame.strip()] = mpc_observation except Exception as e: logger.error(str(e)) logger.error(mpc_observation) ref_sky_coord = None min_radius = config.read('CUTOUTS.SINGLETS.RADIUS') if not isinstance(min_radius, units.Quantity): min_radius = min_radius * units.arcsec for source in tracks_data.get_sources(): astrom_observations = tracks_data.observations source_readings = source.get_readings() foci = [] # Loop over all the sources to determine which ones go which which focus location. # this is helpful to for blinking. for idx in range(len(source_readings)): source_reading = source_readings[idx] astrom_observation = astrom_observations[idx] self.orbit.predict(Time(astrom_observation.mjd, format='mjd', scale='utc')) assert isinstance(source_reading, SourceReading) if ref_sky_coord is None or source_reading.sky_coord.separation(ref_sky_coord) > min_radius * 0.8: foci.append([]) ref_sky_coord = source_reading.sky_coord foci[-1].append(source_reading) for focus in foci: ra = numpy.zeros(len(focus)) dec = numpy.zeros(len(focus)) for idx in range(len(focus)): source_reading = focus[idx] ra[idx] = source_reading.sky_coord.ra.to('degree').value dec[idx] = source_reading.sky_coord.dec.to('degree').value ref_sky_coord = SkyCoord(ra.mean(), dec.mean(), unit='degree') for source_reading in focus: source_reading.reference_sky_coord = ref_sky_coord source_reading.pa = self.orbit.pa # why are these being recorded just in pixels? Because the error ellipse is drawn in pixels. # TODO: Modify error ellipse drawing routine to use WCS but be sure # that this does not cause trouble with the use of dra/ddec for cutout computer source_reading.dx = self.orbit.dra source_reading.dy = self.orbit.ddec frame = astrom_observation.rawname if frame in tracks_data.mpc_observations: source_reading.discovery = tracks_data.mpc_observations[frame].discovery return tracks_data
python
def query_ssos(self, mpc_observations, lunation_count=None): """Send a query to the SSOS web service, looking for available observations using the given track. :param mpc_observations: a list of mpc.Observations :param lunation_count: how many dark runs (+ and -) to search into :return: an SSOSData object :rtype: SSOSData """ # we observe ~ a week either side of new moon # but we don't know when in the dark run the discovery happened # so be generous with the search boundaries, add extra 2 weeks # current date just has to be the night of the triplet, if lunation_count is None: # Only using SSOS to find data acquired during the survey period, for now. search_start_date = Time('2013-02-08', scale='utc') search_end_date = Time(datetime.datetime.now().strftime('%Y-%m-%d'), scale='utc') else: search_start_date = Time((mpc_observations[0].date.jd * units.day - ( self._nights_per_darkrun + lunation_count * self._nights_separating_darkruns)), format='jd', scale='utc') search_end_date = Time((mpc_observations[-1].date.jd * units.day + ( self._nights_per_darkrun + lunation_count * self._nights_separating_darkruns)), format='jd', scale='utc') logger.info("Sending query to SSOS start_date: {} end_data: {}\n".format(search_start_date, search_end_date)) query = Query(mpc_observations, search_start_date=search_start_date, search_end_date=search_end_date) logger.debug("Parsing query results...") tracks_data = self.ssos_parser.parse(query.get(), mpc_observations=mpc_observations) tracks_data.mpc_observations = {} for mpc_observation in mpc_observations: # attach the input observations to the the SSOS query result. 
if isinstance(mpc_observation.comment, mpc.OSSOSComment): try: tracks_data.mpc_observations[mpc_observation.comment.frame.strip()] = mpc_observation except Exception as e: logger.error(str(e)) logger.error(mpc_observation) ref_sky_coord = None min_radius = config.read('CUTOUTS.SINGLETS.RADIUS') if not isinstance(min_radius, units.Quantity): min_radius = min_radius * units.arcsec for source in tracks_data.get_sources(): astrom_observations = tracks_data.observations source_readings = source.get_readings() foci = [] # Loop over all the sources to determine which ones go which which focus location. # this is helpful to for blinking. for idx in range(len(source_readings)): source_reading = source_readings[idx] astrom_observation = astrom_observations[idx] self.orbit.predict(Time(astrom_observation.mjd, format='mjd', scale='utc')) assert isinstance(source_reading, SourceReading) if ref_sky_coord is None or source_reading.sky_coord.separation(ref_sky_coord) > min_radius * 0.8: foci.append([]) ref_sky_coord = source_reading.sky_coord foci[-1].append(source_reading) for focus in foci: ra = numpy.zeros(len(focus)) dec = numpy.zeros(len(focus)) for idx in range(len(focus)): source_reading = focus[idx] ra[idx] = source_reading.sky_coord.ra.to('degree').value dec[idx] = source_reading.sky_coord.dec.to('degree').value ref_sky_coord = SkyCoord(ra.mean(), dec.mean(), unit='degree') for source_reading in focus: source_reading.reference_sky_coord = ref_sky_coord source_reading.pa = self.orbit.pa # why are these being recorded just in pixels? Because the error ellipse is drawn in pixels. # TODO: Modify error ellipse drawing routine to use WCS but be sure # that this does not cause trouble with the use of dra/ddec for cutout computer source_reading.dx = self.orbit.dra source_reading.dy = self.orbit.ddec frame = astrom_observation.rawname if frame in tracks_data.mpc_observations: source_reading.discovery = tracks_data.mpc_observations[frame].discovery return tracks_data
[ "def", "query_ssos", "(", "self", ",", "mpc_observations", ",", "lunation_count", "=", "None", ")", ":", "# we observe ~ a week either side of new moon", "# but we don't know when in the dark run the discovery happened", "# so be generous with the search boundaries, add extra 2 weeks", ...
Send a query to the SSOS web service, looking for available observations using the given track. :param mpc_observations: a list of mpc.Observations :param lunation_count: how many dark runs (+ and -) to search into :return: an SSOSData object :rtype: SSOSData
[ "Send", "a", "query", "to", "the", "SSOS", "web", "service", "looking", "for", "available", "observations", "using", "the", "given", "track", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L88-L173
OSSOS/MOP
src/ossos/core/ossos/ssos.py
TrackTarget.query_ssos
def query_ssos(self, target_name, lunation_count=None): """Send a query to the SSOS web service, looking for available observations using the given track. :param target_name: name of target to query against SSOIS db :param lunation_count: ignored :rtype: SSOSData """ # we observe ~ a week either side of new moon # but we don't know when in the dark run the discovery happened # so be generous with the search boundaries, add extra 2 weeks # current date just has to be the night of the triplet, from mp_ephem import horizons search_start_date = Time('1999-01-01', scale='utc') search_end_date = Time(datetime.datetime.now().strftime('%Y-%m-%d'), scale='utc') logger.info("Sending query to SSOS start_date: {} end_data: {}\n".format(search_start_date, search_end_date)) query = Query(target_name, search_start_date=search_start_date, search_end_date=search_end_date) logger.debug("Parsing query results...") tracks_data = self.ssos_parser.parse(query.get()) tracks_data.mpc_observations = {} start_time = Time(search_start_date) stop_time = Time(search_end_date) step_size = 5 * units.hour self.orbit = horizons.Body(target_name, start_time, stop_time, step_size) ref_sky_coord = None for source in tracks_data.get_sources(): astrom_observations = tracks_data.observations source_readings = source.get_readings() for idx in range(len(source_readings)): source_reading = source_readings[idx] assert isinstance(source_reading, SourceReading) if ref_sky_coord is None or source_reading.sky_coord.separation(ref_sky_coord) > 40 * units.arcsec: ref_sky_coord = source_reading.sky_coord source_reading.reference_sky_coord = ref_sky_coord astrom_observation = astrom_observations[idx] self.orbit.predict(Time(astrom_observation.mjd, format='mjd', scale='utc')) source_reading.pa = self.orbit.pa # why are these being recorded just in pixels? Because the error ellipse is drawn in pixels. 
# TODO: Modify error ellipse drawing routine to use WCS but be sure # that this does not cause trouble with the use of dra/ddec for cutout computer source_reading.dx = self.orbit.dra source_reading.dy = self.orbit.ddec logger.debug("Sending back set of observations that might contain the target: {}".format(tracks_data)) return tracks_data
python
def query_ssos(self, target_name, lunation_count=None): """Send a query to the SSOS web service, looking for available observations using the given track. :param target_name: name of target to query against SSOIS db :param lunation_count: ignored :rtype: SSOSData """ # we observe ~ a week either side of new moon # but we don't know when in the dark run the discovery happened # so be generous with the search boundaries, add extra 2 weeks # current date just has to be the night of the triplet, from mp_ephem import horizons search_start_date = Time('1999-01-01', scale='utc') search_end_date = Time(datetime.datetime.now().strftime('%Y-%m-%d'), scale='utc') logger.info("Sending query to SSOS start_date: {} end_data: {}\n".format(search_start_date, search_end_date)) query = Query(target_name, search_start_date=search_start_date, search_end_date=search_end_date) logger.debug("Parsing query results...") tracks_data = self.ssos_parser.parse(query.get()) tracks_data.mpc_observations = {} start_time = Time(search_start_date) stop_time = Time(search_end_date) step_size = 5 * units.hour self.orbit = horizons.Body(target_name, start_time, stop_time, step_size) ref_sky_coord = None for source in tracks_data.get_sources(): astrom_observations = tracks_data.observations source_readings = source.get_readings() for idx in range(len(source_readings)): source_reading = source_readings[idx] assert isinstance(source_reading, SourceReading) if ref_sky_coord is None or source_reading.sky_coord.separation(ref_sky_coord) > 40 * units.arcsec: ref_sky_coord = source_reading.sky_coord source_reading.reference_sky_coord = ref_sky_coord astrom_observation = astrom_observations[idx] self.orbit.predict(Time(astrom_observation.mjd, format='mjd', scale='utc')) source_reading.pa = self.orbit.pa # why are these being recorded just in pixels? Because the error ellipse is drawn in pixels. 
# TODO: Modify error ellipse drawing routine to use WCS but be sure # that this does not cause trouble with the use of dra/ddec for cutout computer source_reading.dx = self.orbit.dra source_reading.dy = self.orbit.ddec logger.debug("Sending back set of observations that might contain the target: {}".format(tracks_data)) return tracks_data
[ "def", "query_ssos", "(", "self", ",", "target_name", ",", "lunation_count", "=", "None", ")", ":", "# we observe ~ a week either side of new moon", "# but we don't know when in the dark run the discovery happened", "# so be generous with the search boundaries, add extra 2 weeks", "# c...
Send a query to the SSOS web service, looking for available observations using the given track. :param target_name: name of target to query against SSOIS db :param lunation_count: ignored :rtype: SSOSData
[ "Send", "a", "query", "to", "the", "SSOS", "web", "service", "looking", "for", "available", "observations", "using", "the", "given", "track", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L189-L239
OSSOS/MOP
src/ossos/core/ossos/ssos.py
SSOSParser.build_source_reading
def build_source_reading(expnum, ccd=None, ftype='p'): """ Build an astrom.Observation object for a SourceReading :param expnum: (str) Name or CFHT Exposure number of the observation. :param ccd: (str) CCD is this observation associated with. (can be None) :param ftype: (str) exposure time (specific to CFHT imaging) :return: An astrom.Observation object for the observation. :rtype: astrom.Observation """ logger.debug("Building source reading for expnum:{} ccd:{} ftype:{}".format(expnum, ccd, ftype)) return astrom.Observation(expnum=str(expnum), ftype=ftype, ccdnum=ccd)
python
def build_source_reading(expnum, ccd=None, ftype='p'): """ Build an astrom.Observation object for a SourceReading :param expnum: (str) Name or CFHT Exposure number of the observation. :param ccd: (str) CCD is this observation associated with. (can be None) :param ftype: (str) exposure time (specific to CFHT imaging) :return: An astrom.Observation object for the observation. :rtype: astrom.Observation """ logger.debug("Building source reading for expnum:{} ccd:{} ftype:{}".format(expnum, ccd, ftype)) return astrom.Observation(expnum=str(expnum), ftype=ftype, ccdnum=ccd)
[ "def", "build_source_reading", "(", "expnum", ",", "ccd", "=", "None", ",", "ftype", "=", "'p'", ")", ":", "logger", ".", "debug", "(", "\"Building source reading for expnum:{} ccd:{} ftype:{}\"", ".", "format", "(", "expnum", ",", "ccd", ",", "ftype", ")", ")...
Build an astrom.Observation object for a SourceReading :param expnum: (str) Name or CFHT Exposure number of the observation. :param ccd: (str) CCD is this observation associated with. (can be None) :param ftype: (str) exposure time (specific to CFHT imaging) :return: An astrom.Observation object for the observation. :rtype: astrom.Observation
[ "Build", "an", "astrom", ".", "Observation", "object", "for", "a", "SourceReading" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L280-L296
OSSOS/MOP
src/ossos/core/ossos/ssos.py
SSOSParser.parse
def parse(self, ssos_result_filename_or_lines, mpc_observations=None): """ given the result table create 'source' objects. :param ssos_result_filename_or_lines: :param mpc_observations: a list of mpc.Observation objects used to retrieve the SSOS observations """ table_reader = ascii.get_reader(Reader=ascii.Basic) table_reader.inconsistent_handler = self._skip_missing_data table_reader.header.splitter.delimiter = '\t' table_reader.data.splitter.delimiter = '\t' ssos_table = table_reader.read(ssos_result_filename_or_lines) dbimage_list = storage.list_dbimages(dbimages=storage.DBIMAGES) logger.debug("Comparing to {} observations in dbimages: {}".format(len(dbimage_list), storage.DBIMAGES)) sources = [] observations = [] source_readings = [] if mpc_observations is not None and isinstance(mpc_observations[0], mpc.Observation): orbit = Orbfit(mpc_observations) else: from mp_ephem import horizons start_time = Time(min(ssos_table['MJD']), format='mjd') stop_time = Time(max(ssos_table['MJD']), format='mjd') step_size = 5.0 * units.hour orbit = horizons.Body(self.provisional_name, start_time, stop_time, step_size) warnings.filterwarnings('ignore') logger.info("Loading {} observations\n".format(len(ssos_table))) expnums_examined = [] for row in ssos_table: # Trim down to OSSOS-specific images logger.debug("Checking row: {}".format(row)) if (row['Filter'] not in parameters.OSSOS_FILTERS) or row['Image_target'].startswith('WP'): logger.debug("Failed filter / target name check") continue # check if a dbimages object exists # For CFHT/MegaCam strip off the trailing character to get the exposure number. ftype = row['Image'][-1] expnum = row['Image'][:-1] if str(expnum) not in dbimage_list: logger.debug("Expnum: {} Failed dbimage list check".format(expnum)) continue logger.debug("Expnum: {} Passed dbimage list check".format(expnum)) # The file extension is the ccd number + 1 , or the first extension. 
ccd = int(row['Ext'])-1 if 39 < ccd < 0 or ccd < 0: ccd = None x = row['X'] * units.pix y = row['Y'] * units.pix ra = row['Object_RA'] * units.degree dec = row['Object_Dec'] * units.degree ssois_coordinate = SkyCoord(ra, dec) mjd = row['MJD'] * units.day # if not 0 < x.value < 2060 or not 0 < y.value < 4700: # continue obs_date = Time(mjd, format='mjd', scale='utc') logger.info("Calling predict") orbit.predict(obs_date) logger.info("Done calling predict") if orbit.dra > 4 * units.arcminute or orbit.ddec > 4.0 * units.arcminute: print "Skipping entry as orbit uncertainty at date {} is large.".format(obs_date) continue if expnum in expnums_examined: logger.debug("Already checked this exposure.") continue expnums_examined.append(expnum) logger.debug(("SSOIS Prediction: exposure:{} ext:{} " "ra:{} dec:{} x:{} y:{}").format(expnum, ccd, ra, dec, x, y)) logger.debug(("Orbfit Prediction: " "ra:{} dec:{} ").format(orbit.coordinate.ra.to(units.degree), orbit.coordinate.dec.to(units.degree))) logger.info("Building Observation") observation = SSOSParser.build_source_reading(expnum, ccd, ftype=ftype) observation.mjd = mjd from_input_file = observation.rawname in self.input_rawnames # compare to input observation list. previous = False mpc_observation = None if from_input_file: for mpc_observation in mpc_observations: try: if mpc_observation.comment.frame.strip() == observation.rawname: # only skip previous obseravtions if not discovery. previous = not mpc_observation.discovery break except Exception as e: logger.debug(str(e)) pass mpc_observation = None # skip previously measured observations if requested. 
if self.skip_previous and ( previous or observation.rawname in self.null_observations): continue logger.info('built observation {}'.format(observation)) observations.append(observation) null_observation = observation.rawname in self.null_observations ddec = orbit.ddec + abs(orbit.coordinate.dec - ssois_coordinate.dec) dra = orbit.dra + abs(orbit.coordinate.ra - ssois_coordinate.ra) logger.info(" Building SourceReading .... \n") source_reading = astrom.SourceReading(x=x, y=y, x0=x, y0=y, ra=orbit.coordinate.ra.to(units.degree).value, dec=orbit.coordinate.dec.to(units.degree).value, xref=x, yref=y, obs=observation, ssos=True, from_input_file=from_input_file, dx=dra, dy=ddec, pa=orbit.pa, null_observation=null_observation) source_reading.mpc_observation = mpc_observation source_readings.append(source_reading) logger.info("Source Reading Built") # build our array of SourceReading objects sources.append(source_readings) warnings.filterwarnings('once') return SSOSData(observations, sources, self.provisional_name)
python
def parse(self, ssos_result_filename_or_lines, mpc_observations=None): """ given the result table create 'source' objects. :param ssos_result_filename_or_lines: :param mpc_observations: a list of mpc.Observation objects used to retrieve the SSOS observations """ table_reader = ascii.get_reader(Reader=ascii.Basic) table_reader.inconsistent_handler = self._skip_missing_data table_reader.header.splitter.delimiter = '\t' table_reader.data.splitter.delimiter = '\t' ssos_table = table_reader.read(ssos_result_filename_or_lines) dbimage_list = storage.list_dbimages(dbimages=storage.DBIMAGES) logger.debug("Comparing to {} observations in dbimages: {}".format(len(dbimage_list), storage.DBIMAGES)) sources = [] observations = [] source_readings = [] if mpc_observations is not None and isinstance(mpc_observations[0], mpc.Observation): orbit = Orbfit(mpc_observations) else: from mp_ephem import horizons start_time = Time(min(ssos_table['MJD']), format='mjd') stop_time = Time(max(ssos_table['MJD']), format='mjd') step_size = 5.0 * units.hour orbit = horizons.Body(self.provisional_name, start_time, stop_time, step_size) warnings.filterwarnings('ignore') logger.info("Loading {} observations\n".format(len(ssos_table))) expnums_examined = [] for row in ssos_table: # Trim down to OSSOS-specific images logger.debug("Checking row: {}".format(row)) if (row['Filter'] not in parameters.OSSOS_FILTERS) or row['Image_target'].startswith('WP'): logger.debug("Failed filter / target name check") continue # check if a dbimages object exists # For CFHT/MegaCam strip off the trailing character to get the exposure number. ftype = row['Image'][-1] expnum = row['Image'][:-1] if str(expnum) not in dbimage_list: logger.debug("Expnum: {} Failed dbimage list check".format(expnum)) continue logger.debug("Expnum: {} Passed dbimage list check".format(expnum)) # The file extension is the ccd number + 1 , or the first extension. 
ccd = int(row['Ext'])-1 if 39 < ccd < 0 or ccd < 0: ccd = None x = row['X'] * units.pix y = row['Y'] * units.pix ra = row['Object_RA'] * units.degree dec = row['Object_Dec'] * units.degree ssois_coordinate = SkyCoord(ra, dec) mjd = row['MJD'] * units.day # if not 0 < x.value < 2060 or not 0 < y.value < 4700: # continue obs_date = Time(mjd, format='mjd', scale='utc') logger.info("Calling predict") orbit.predict(obs_date) logger.info("Done calling predict") if orbit.dra > 4 * units.arcminute or orbit.ddec > 4.0 * units.arcminute: print "Skipping entry as orbit uncertainty at date {} is large.".format(obs_date) continue if expnum in expnums_examined: logger.debug("Already checked this exposure.") continue expnums_examined.append(expnum) logger.debug(("SSOIS Prediction: exposure:{} ext:{} " "ra:{} dec:{} x:{} y:{}").format(expnum, ccd, ra, dec, x, y)) logger.debug(("Orbfit Prediction: " "ra:{} dec:{} ").format(orbit.coordinate.ra.to(units.degree), orbit.coordinate.dec.to(units.degree))) logger.info("Building Observation") observation = SSOSParser.build_source_reading(expnum, ccd, ftype=ftype) observation.mjd = mjd from_input_file = observation.rawname in self.input_rawnames # compare to input observation list. previous = False mpc_observation = None if from_input_file: for mpc_observation in mpc_observations: try: if mpc_observation.comment.frame.strip() == observation.rawname: # only skip previous obseravtions if not discovery. previous = not mpc_observation.discovery break except Exception as e: logger.debug(str(e)) pass mpc_observation = None # skip previously measured observations if requested. 
if self.skip_previous and ( previous or observation.rawname in self.null_observations): continue logger.info('built observation {}'.format(observation)) observations.append(observation) null_observation = observation.rawname in self.null_observations ddec = orbit.ddec + abs(orbit.coordinate.dec - ssois_coordinate.dec) dra = orbit.dra + abs(orbit.coordinate.ra - ssois_coordinate.ra) logger.info(" Building SourceReading .... \n") source_reading = astrom.SourceReading(x=x, y=y, x0=x, y0=y, ra=orbit.coordinate.ra.to(units.degree).value, dec=orbit.coordinate.dec.to(units.degree).value, xref=x, yref=y, obs=observation, ssos=True, from_input_file=from_input_file, dx=dra, dy=ddec, pa=orbit.pa, null_observation=null_observation) source_reading.mpc_observation = mpc_observation source_readings.append(source_reading) logger.info("Source Reading Built") # build our array of SourceReading objects sources.append(source_readings) warnings.filterwarnings('once') return SSOSData(observations, sources, self.provisional_name)
[ "def", "parse", "(", "self", ",", "ssos_result_filename_or_lines", ",", "mpc_observations", "=", "None", ")", ":", "table_reader", "=", "ascii", ".", "get_reader", "(", "Reader", "=", "ascii", ".", "Basic", ")", "table_reader", ".", "inconsistent_handler", "=", ...
given the result table create 'source' objects. :param ssos_result_filename_or_lines: :param mpc_observations: a list of mpc.Observation objects used to retrieve the SSOS observations
[ "given", "the", "result", "table", "create", "source", "objects", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L298-L424
OSSOS/MOP
src/ossos/core/ossos/ssos.py
ParamDictBuilder.search_end_date
def search_end_date(self, search_end_date): """ :type search_end_date: astropy.io.Time :param search_end_date: search for frames take after the given date. """ assert isinstance(search_end_date, Time) self._search_end_date = search_end_date.replicate(format='iso') self._search_end_date.out_subfmt = 'date'
python
def search_end_date(self, search_end_date): """ :type search_end_date: astropy.io.Time :param search_end_date: search for frames take after the given date. """ assert isinstance(search_end_date, Time) self._search_end_date = search_end_date.replicate(format='iso') self._search_end_date.out_subfmt = 'date'
[ "def", "search_end_date", "(", "self", ",", "search_end_date", ")", ":", "assert", "isinstance", "(", "search_end_date", ",", "Time", ")", "self", ".", "_search_end_date", "=", "search_end_date", ".", "replicate", "(", "format", "=", "'iso'", ")", "self", ".",...
:type search_end_date: astropy.io.Time :param search_end_date: search for frames take after the given date.
[ ":", "type", "search_end_date", ":", "astropy", ".", "io", ".", "Time", ":", "param", "search_end_date", ":", "search", "for", "frames", "take", "after", "the", "given", "date", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L580-L587
OSSOS/MOP
src/ossos/core/ossos/ssos.py
ParamDictBuilder.params
def params(self): """ :return: A dictionary of SSOS query parameters. :rtype: dict """ params = dict(format=RESPONSE_FORMAT, verbose=self.verbose, epoch1=str(self.search_start_date), epoch2=str(self.search_end_date), search=self.orbit_method, eunits=self.error_units, eellipse=self.error_ellipse, extres=self.resolve_extension, xyres=self.resolve_position, telinst=self.telescope_instrument) if self.orbit_method == 'bynameHorizons': params['object'] = NEW_LINE.join((str(target_name) for target_name in self.observations)) else: params['obs'] = NEW_LINE.join((str(observation) for observation in self.observations)) return params
python
def params(self): """ :return: A dictionary of SSOS query parameters. :rtype: dict """ params = dict(format=RESPONSE_FORMAT, verbose=self.verbose, epoch1=str(self.search_start_date), epoch2=str(self.search_end_date), search=self.orbit_method, eunits=self.error_units, eellipse=self.error_ellipse, extres=self.resolve_extension, xyres=self.resolve_position, telinst=self.telescope_instrument) if self.orbit_method == 'bynameHorizons': params['object'] = NEW_LINE.join((str(target_name) for target_name in self.observations)) else: params['obs'] = NEW_LINE.join((str(observation) for observation in self.observations)) return params
[ "def", "params", "(", "self", ")", ":", "params", "=", "dict", "(", "format", "=", "RESPONSE_FORMAT", ",", "verbose", "=", "self", ".", "verbose", ",", "epoch1", "=", "str", "(", "self", ".", "search_start_date", ")", ",", "epoch2", "=", "str", "(", ...
:return: A dictionary of SSOS query parameters. :rtype: dict
[ ":", "return", ":", "A", "dictionary", "of", "SSOS", "query", "parameters", ".", ":", "rtype", ":", "dict" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L677-L697
OSSOS/MOP
src/ossos/core/ossos/ssos.py
Query.get
def get(self): """ :return: A string containing the TSV result from SSOS :rtype: str :raise: AssertionError """ params = self.param_dict_builder.params logger.debug(pprint.pformat(format(params))) response = requests.post(SSOS_URL, data=params, headers=self.headers) logger.debug(response.url) assert isinstance(response, requests.Response) assert (response.status_code == requests.codes.ok) lines = response.content # note: spelling 'occured' is in SSOIS if len(lines) < 2 or "An error occured getting the ephemeris" in lines: print lines print response.url raise IOError(os.errno.EACCES, "call to SSOIS failed on format error") if os.access("backdoor.tsv", os.R_OK): lines += open("backdoor.tsv").read() return lines
python
def get(self): """ :return: A string containing the TSV result from SSOS :rtype: str :raise: AssertionError """ params = self.param_dict_builder.params logger.debug(pprint.pformat(format(params))) response = requests.post(SSOS_URL, data=params, headers=self.headers) logger.debug(response.url) assert isinstance(response, requests.Response) assert (response.status_code == requests.codes.ok) lines = response.content # note: spelling 'occured' is in SSOIS if len(lines) < 2 or "An error occured getting the ephemeris" in lines: print lines print response.url raise IOError(os.errno.EACCES, "call to SSOIS failed on format error") if os.access("backdoor.tsv", os.R_OK): lines += open("backdoor.tsv").read() return lines
[ "def", "get", "(", "self", ")", ":", "params", "=", "self", ".", "param_dict_builder", ".", "params", "logger", ".", "debug", "(", "pprint", ".", "pformat", "(", "format", "(", "params", ")", ")", ")", "response", "=", "requests", ".", "post", "(", "...
:return: A string containing the TSV result from SSOS :rtype: str :raise: AssertionError
[ ":", "return", ":", "A", "string", "containing", "the", "TSV", "result", "from", "SSOS", ":", "rtype", ":", "str", ":", "raise", ":", "AssertionError" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ssos.py#L730-L755
OSSOS/MOP
src/ossos/core/ossos/gui/views/mainframe.py
_FocusablePanel.SetFocus
def SetFocus(self, **kwargs): """ Over-rides normal behaviour of shifting focus to any child. Prefers the one set explicityly by use_as_focus. :param **kwargs: """ if self._focus is not None: self._focus.SetFocus() else: # fall back on the default behaviour super(_FocusablePanel, self).SetFocus()
python
def SetFocus(self, **kwargs): """ Over-rides normal behaviour of shifting focus to any child. Prefers the one set explicityly by use_as_focus. :param **kwargs: """ if self._focus is not None: self._focus.SetFocus() else: # fall back on the default behaviour super(_FocusablePanel, self).SetFocus()
[ "def", "SetFocus", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_focus", "is", "not", "None", ":", "self", ".", "_focus", ".", "SetFocus", "(", ")", "else", ":", "# fall back on the default behaviour", "super", "(", "_FocusablePanel"...
Over-rides normal behaviour of shifting focus to any child. Prefers the one set explicityly by use_as_focus. :param **kwargs:
[ "Over", "-", "rides", "normal", "behaviour", "of", "shifting", "focus", "to", "any", "child", ".", "Prefers", "the", "one", "set", "explicityly", "by", "use_as_focus", ".", ":", "param", "**", "kwargs", ":" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/views/mainframe.py#L116-L126
OSSOS/MOP
src/ossos/core/scripts/stepI.py
step3
def step3(expnums, ccd, version, rate_min, rate_max, angle, width, field=None, prefix=None, dry_run=False, maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0): """run the actual step3 on the given exp/ccd combo""" jmp_args = ['step3jmp'] matt_args = ['step3jjk'] idx = 0 cmd_args = [] for expnum in expnums: idx += 1 for ext in ['unid.jmp', 'unid.matt']: storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix) image = os.path.splitext(os.path.basename(storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0] cmd_args.append('-f%d' % idx) cmd_args.append(image) cmd_args.extend(['-rn', str(rate_min), '-rx', str(rate_max), '-a', str(angle), '-w', str(width)]) jmp_args.extend(cmd_args) # Add some extra arguemnents for the ISO search. cmd_args.extend(['-fr', str(maximum_flux_ratio), '-ma', str(minimum_area), '-mf', str(minimum_median_flux)]) matt_args.extend(cmd_args) logging.info(util.exec_prog(jmp_args)) logging.info(util.exec_prog(matt_args)) if dry_run: return if field is None: field = str(expnums[0]) storage.mkdir(os.path.dirname(storage.get_uri(field, ccd=ccd, version=version, prefix=prefix))) for ext in ['moving.jmp', 'moving.matt']: uri = storage.get_uri(field, ccd=ccd, version=version, ext=ext, prefix=prefix) filename = '%s%d%s%s.%s' % (prefix, expnums[0], version, str(ccd).zfill(2), ext) storage.copy(filename, uri) return
python
def step3(expnums, ccd, version, rate_min, rate_max, angle, width, field=None, prefix=None, dry_run=False, maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0): """run the actual step3 on the given exp/ccd combo""" jmp_args = ['step3jmp'] matt_args = ['step3jjk'] idx = 0 cmd_args = [] for expnum in expnums: idx += 1 for ext in ['unid.jmp', 'unid.matt']: storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix) image = os.path.splitext(os.path.basename(storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0] cmd_args.append('-f%d' % idx) cmd_args.append(image) cmd_args.extend(['-rn', str(rate_min), '-rx', str(rate_max), '-a', str(angle), '-w', str(width)]) jmp_args.extend(cmd_args) # Add some extra arguemnents for the ISO search. cmd_args.extend(['-fr', str(maximum_flux_ratio), '-ma', str(minimum_area), '-mf', str(minimum_median_flux)]) matt_args.extend(cmd_args) logging.info(util.exec_prog(jmp_args)) logging.info(util.exec_prog(matt_args)) if dry_run: return if field is None: field = str(expnums[0]) storage.mkdir(os.path.dirname(storage.get_uri(field, ccd=ccd, version=version, prefix=prefix))) for ext in ['moving.jmp', 'moving.matt']: uri = storage.get_uri(field, ccd=ccd, version=version, ext=ext, prefix=prefix) filename = '%s%d%s%s.%s' % (prefix, expnums[0], version, str(ccd).zfill(2), ext) storage.copy(filename, uri) return
[ "def", "step3", "(", "expnums", ",", "ccd", ",", "version", ",", "rate_min", ",", "rate_max", ",", "angle", ",", "width", ",", "field", "=", "None", ",", "prefix", "=", "None", ",", "dry_run", "=", "False", ",", "maximum_flux_ratio", "=", "3", ",", "...
run the actual step3 on the given exp/ccd combo
[ "run", "the", "actual", "step3", "on", "the", "given", "exp", "/", "ccd", "combo" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/stepI.py#L38-L93
JohnVinyard/zounds
zounds/persistence/util.py
extract_init_args
def extract_init_args(instance): """ Given an instance, and under the assumption that member variables have the same name as the __init__ arguments, extract the arguments so they can be used to reconstruct the instance when deserializing """ cls = instance.__class__ args = [x for x in inspect.getargspec(cls.__init__).args if x != 'self'] return [instance.__dict__[key] for key in args]
python
def extract_init_args(instance): """ Given an instance, and under the assumption that member variables have the same name as the __init__ arguments, extract the arguments so they can be used to reconstruct the instance when deserializing """ cls = instance.__class__ args = [x for x in inspect.getargspec(cls.__init__).args if x != 'self'] return [instance.__dict__[key] for key in args]
[ "def", "extract_init_args", "(", "instance", ")", ":", "cls", "=", "instance", ".", "__class__", "args", "=", "[", "x", "for", "x", "in", "inspect", ".", "getargspec", "(", "cls", ".", "__init__", ")", ".", "args", "if", "x", "!=", "'self'", "]", "re...
Given an instance, and under the assumption that member variables have the same name as the __init__ arguments, extract the arguments so they can be used to reconstruct the instance when deserializing
[ "Given", "an", "instance", "and", "under", "the", "assumption", "that", "member", "variables", "have", "the", "same", "name", "as", "the", "__init__", "arguments", "extract", "the", "arguments", "so", "they", "can", "be", "used", "to", "reconstruct", "the", ...
train
https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/persistence/util.py#L26-L34
OSSOS/MOP
src/jjk/preproc/MOPwindow.py
MOPwindow.list
def list(self,header,choices): "Display list of choices. As many as we can get in a page." if not self.__list_window: (y,x)=self.__main.getmaxyx() self.__list_window = self.__main.subwin(35,x,0,0) _lw=self.__list_window _lw.keypad(1) (y_max,x_max)=_lw.getmaxyx() (y_0, x_0)=_lw.getbegyx() x_start=1+x_0 _lw.box() ## Number of list items allowed. ### first entry in the list appears at page_top page_top=y_0+2 ### the last entry display will be at page_bottom page_bottom = y_max-2 ### break the list into chunks. max_items_per_page = page_bottom-page_top ### start at the top of the list top_item=0 f=open('log.msg','w') first_item=page_top current_item=0 item_list=[] while 1: _lw.erase() _lw.box() _lw.addstr(page_top-1,x_start,header) if top_item > len(choices): top_item=0 for i in range(max_items_per_page): item=i+top_item if not item in range(len(choices)): break _lw.addstr(i+page_top,x_start,choices[item]) ### provide a hint that there is more info in the list ### setup where we are in the list last_item=item if top_item > 0 : _lw.addstr(page_bottom,x_start,"P(revious)") if last_item < len(choices): _lw.addstr(page_bottom,x_max-8,"N(ext)") while 1: c=_lw.getch(current_item-top_item+page_top,x_start) if c==curses.KEY_UP: current_item=current_item-1 elif c==curses.KEY_DOWN: current_item=current_item+1 elif c==ord(' '): if current_item in item_list: _lw.addstr(choices[current_item]) item_list.remove(current_item) else: _lw.addstr(choices[current_item],curses.A_REVERSE) item_list.append(current_item) elif c==ord('P'): top_item=top_item-max_items_per_page current_item=top_item break elif c==ord('N'): top_item=top_item + max_items_per_page current_item=top_item break elif c==10: return(item_list) elif c==ord('q'): _lw.erase() return(None) elif c==ord('x'): choices[current_item]=choices[current_item][:4]+" "+choices[current_item][5:] _lw.addstr(choices[current_item]) else: choices[current_item]=choices[current_item][:7]+chr(c).capitalize()+choices[current_item][8:] 
_lw.addstr(choices[current_item]) if current_item > last_item-1: if last_item < len(choices): top_item = top_item+1 break else: current_item=current_item-1 if current_item < top_item : if top_item > 0: top_item = top_item-1 break else: current_item=current_item+1
python
def list(self,header,choices): "Display list of choices. As many as we can get in a page." if not self.__list_window: (y,x)=self.__main.getmaxyx() self.__list_window = self.__main.subwin(35,x,0,0) _lw=self.__list_window _lw.keypad(1) (y_max,x_max)=_lw.getmaxyx() (y_0, x_0)=_lw.getbegyx() x_start=1+x_0 _lw.box() ## Number of list items allowed. ### first entry in the list appears at page_top page_top=y_0+2 ### the last entry display will be at page_bottom page_bottom = y_max-2 ### break the list into chunks. max_items_per_page = page_bottom-page_top ### start at the top of the list top_item=0 f=open('log.msg','w') first_item=page_top current_item=0 item_list=[] while 1: _lw.erase() _lw.box() _lw.addstr(page_top-1,x_start,header) if top_item > len(choices): top_item=0 for i in range(max_items_per_page): item=i+top_item if not item in range(len(choices)): break _lw.addstr(i+page_top,x_start,choices[item]) ### provide a hint that there is more info in the list ### setup where we are in the list last_item=item if top_item > 0 : _lw.addstr(page_bottom,x_start,"P(revious)") if last_item < len(choices): _lw.addstr(page_bottom,x_max-8,"N(ext)") while 1: c=_lw.getch(current_item-top_item+page_top,x_start) if c==curses.KEY_UP: current_item=current_item-1 elif c==curses.KEY_DOWN: current_item=current_item+1 elif c==ord(' '): if current_item in item_list: _lw.addstr(choices[current_item]) item_list.remove(current_item) else: _lw.addstr(choices[current_item],curses.A_REVERSE) item_list.append(current_item) elif c==ord('P'): top_item=top_item-max_items_per_page current_item=top_item break elif c==ord('N'): top_item=top_item + max_items_per_page current_item=top_item break elif c==10: return(item_list) elif c==ord('q'): _lw.erase() return(None) elif c==ord('x'): choices[current_item]=choices[current_item][:4]+" "+choices[current_item][5:] _lw.addstr(choices[current_item]) else: choices[current_item]=choices[current_item][:7]+chr(c).capitalize()+choices[current_item][8:] 
_lw.addstr(choices[current_item]) if current_item > last_item-1: if last_item < len(choices): top_item = top_item+1 break else: current_item=current_item-1 if current_item < top_item : if top_item > 0: top_item = top_item-1 break else: current_item=current_item+1
[ "def", "list", "(", "self", ",", "header", ",", "choices", ")", ":", "if", "not", "self", ".", "__list_window", ":", "(", "y", ",", "x", ")", "=", "self", ".", "__main", ".", "getmaxyx", "(", ")", "self", ".", "__list_window", "=", "self", ".", "...
Display list of choices. As many as we can get in a page.
[ "Display", "list", "of", "choices", ".", "As", "many", "as", "we", "can", "get", "in", "a", "page", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPwindow.py#L41-L133
OSSOS/MOP
src/jjk/preproc/wcsutil.py
WCSObject.recenter
def recenter(self): """ Reset the reference position values to correspond to the center of the reference frame. Algorithm used here developed by Colin Cox - 27-Jan-2004. """ if self.ctype1.find('TAN') < 0 or self.ctype2.find('TAN') < 0: print 'WCS.recenter() only supported for TAN projections.' raise TypeError # Check to see if WCS is already centered... if self.crpix1 == self.naxis1/2. and self.crpix2 == self.naxis2/2.: # No recentering necessary... return without changing WCS. return # This offset aligns the WCS to the center of the pixel, in accordance # with the 'align=center' option used by 'drizzle'. #_drz_off = -0.5 _drz_off = 0. _cen = (self.naxis1/2.+ _drz_off,self.naxis2/2. + _drz_off) # Compute the RA and Dec for center pixel _cenrd = self.xy2rd(_cen) _cd = N.array([[self.cd11,self.cd12],[self.cd21,self.cd22]],type=N.Float64) _ra0 = DEGTORAD(self.crval1) _dec0 = DEGTORAD(self.crval2) _ra = DEGTORAD(_cenrd[0]) _dec = DEGTORAD(_cenrd[1]) # Set up some terms for use in the final result _dx = self.naxis1/2. - self.crpix1 _dy = self.naxis2/2. - self.crpix2 _dE,_dN = DEGTORAD(N.dot(_cd,(_dx,_dy))) _dE_dN = 1 + N.power(_dE,2) + N.power(_dN,2) _cosdec = N.cos(_dec) _sindec = N.sin(_dec) _cosdec0 = N.cos(_dec0) _sindec0 = N.sin(_dec0) _n1 = N.power(_cosdec,2) + _dE*_dE + _dN*_dN*N.power(_sindec,2) _dra_dE = (_cosdec0 - _dN*_sindec0)/_n1 _dra_dN = _dE*_sindec0 /_n1 _ddec_dE = -_dE*N.tan(_dec) / _dE_dN _ddec_dN = (1/_cosdec) * ((_cosdec0 / N.sqrt(_dE_dN)) - (_dN*N.sin(_dec) / _dE_dN)) # Compute new CD matrix values now... _cd11n = _cosdec * (self.cd11*_dra_dE + self.cd21 * _dra_dN) _cd12n = _cosdec * (self.cd12*_dra_dE + self.cd22 * _dra_dN) _cd21n = self.cd11 * _ddec_dE + self.cd21 * _ddec_dN _cd22n = self.cd12 * _ddec_dE + self.cd22 * _ddec_dN _new_orient = RADTODEG(N.arctan2(_cd12n,_cd22n)) # Update the values now... 
self.crpix1 = _cen[0] self.crpix2 = _cen[1] self.crval1 = RADTODEG(_ra) self.crval2 = RADTODEG(_dec) # Keep the same plate scale, only change the orientation self.rotateCD(_new_orient) # These would update the CD matrix with the new rotation # ALONG with the new plate scale which we do not want. self.cd11 = _cd11n self.cd12 = _cd12n self.cd21 = _cd21n self.cd22 = _cd22n
python
def recenter(self): """ Reset the reference position values to correspond to the center of the reference frame. Algorithm used here developed by Colin Cox - 27-Jan-2004. """ if self.ctype1.find('TAN') < 0 or self.ctype2.find('TAN') < 0: print 'WCS.recenter() only supported for TAN projections.' raise TypeError # Check to see if WCS is already centered... if self.crpix1 == self.naxis1/2. and self.crpix2 == self.naxis2/2.: # No recentering necessary... return without changing WCS. return # This offset aligns the WCS to the center of the pixel, in accordance # with the 'align=center' option used by 'drizzle'. #_drz_off = -0.5 _drz_off = 0. _cen = (self.naxis1/2.+ _drz_off,self.naxis2/2. + _drz_off) # Compute the RA and Dec for center pixel _cenrd = self.xy2rd(_cen) _cd = N.array([[self.cd11,self.cd12],[self.cd21,self.cd22]],type=N.Float64) _ra0 = DEGTORAD(self.crval1) _dec0 = DEGTORAD(self.crval2) _ra = DEGTORAD(_cenrd[0]) _dec = DEGTORAD(_cenrd[1]) # Set up some terms for use in the final result _dx = self.naxis1/2. - self.crpix1 _dy = self.naxis2/2. - self.crpix2 _dE,_dN = DEGTORAD(N.dot(_cd,(_dx,_dy))) _dE_dN = 1 + N.power(_dE,2) + N.power(_dN,2) _cosdec = N.cos(_dec) _sindec = N.sin(_dec) _cosdec0 = N.cos(_dec0) _sindec0 = N.sin(_dec0) _n1 = N.power(_cosdec,2) + _dE*_dE + _dN*_dN*N.power(_sindec,2) _dra_dE = (_cosdec0 - _dN*_sindec0)/_n1 _dra_dN = _dE*_sindec0 /_n1 _ddec_dE = -_dE*N.tan(_dec) / _dE_dN _ddec_dN = (1/_cosdec) * ((_cosdec0 / N.sqrt(_dE_dN)) - (_dN*N.sin(_dec) / _dE_dN)) # Compute new CD matrix values now... _cd11n = _cosdec * (self.cd11*_dra_dE + self.cd21 * _dra_dN) _cd12n = _cosdec * (self.cd12*_dra_dE + self.cd22 * _dra_dN) _cd21n = self.cd11 * _ddec_dE + self.cd21 * _ddec_dN _cd22n = self.cd12 * _ddec_dE + self.cd22 * _ddec_dN _new_orient = RADTODEG(N.arctan2(_cd12n,_cd22n)) # Update the values now... 
self.crpix1 = _cen[0] self.crpix2 = _cen[1] self.crval1 = RADTODEG(_ra) self.crval2 = RADTODEG(_dec) # Keep the same plate scale, only change the orientation self.rotateCD(_new_orient) # These would update the CD matrix with the new rotation # ALONG with the new plate scale which we do not want. self.cd11 = _cd11n self.cd12 = _cd12n self.cd21 = _cd21n self.cd22 = _cd22n
[ "def", "recenter", "(", "self", ")", ":", "if", "self", ".", "ctype1", ".", "find", "(", "'TAN'", ")", "<", "0", "or", "self", ".", "ctype2", ".", "find", "(", "'TAN'", ")", "<", "0", ":", "print", "'WCS.recenter() only supported for TAN projections.'", ...
Reset the reference position values to correspond to the center of the reference frame. Algorithm used here developed by Colin Cox - 27-Jan-2004.
[ "Reset", "the", "reference", "position", "values", "to", "correspond", "to", "the", "center", "of", "the", "reference", "frame", ".", "Algorithm", "used", "here", "developed", "by", "Colin", "Cox", "-", "27", "-", "Jan", "-", "2004", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/wcsutil.py#L262-L331
OSSOS/MOP
src/jjk/preproc/wcsutil.py
WCSObject._buildNewKeyname
def _buildNewKeyname(self,key,prepend): """ Builds a new keyword based on original keyword name and a prepend string. """ if len(prepend+key) <= 8: _new_key = prepend+key else: _new_key = str(prepend+key)[:8] return _new_key
python
def _buildNewKeyname(self,key,prepend): """ Builds a new keyword based on original keyword name and a prepend string. """ if len(prepend+key) <= 8: _new_key = prepend+key else: _new_key = str(prepend+key)[:8] return _new_key
[ "def", "_buildNewKeyname", "(", "self", ",", "key", ",", "prepend", ")", ":", "if", "len", "(", "prepend", "+", "key", ")", "<=", "8", ":", "_new_key", "=", "prepend", "+", "key", "else", ":", "_new_key", "=", "str", "(", "prepend", "+", "key", ")"...
Builds a new keyword based on original keyword name and a prepend string.
[ "Builds", "a", "new", "keyword", "based", "on", "original", "keyword", "name", "and", "a", "prepend", "string", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/wcsutil.py#L333-L341
OSSOS/MOP
src/jjk/preproc/wcsutil.py
WCSObject.copy
def copy(self,deep=yes): """ Makes a (deep)copy of this object for use by other objects. """ if deep: return copy.deepcopy(self) else: return copy.copy(self)
python
def copy(self,deep=yes): """ Makes a (deep)copy of this object for use by other objects. """ if deep: return copy.deepcopy(self) else: return copy.copy(self)
[ "def", "copy", "(", "self", ",", "deep", "=", "yes", ")", ":", "if", "deep", ":", "return", "copy", ".", "deepcopy", "(", "self", ")", "else", ":", "return", "copy", ".", "copy", "(", "self", ")" ]
Makes a (deep)copy of this object for use by other objects.
[ "Makes", "a", "(", "deep", ")", "copy", "of", "this", "object", "for", "use", "by", "other", "objects", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/wcsutil.py#L344-L350
OSSOS/MOP
src/jjk/preproc/scrample.py
scrambleTriples
def scrambleTriples(expnums,ccd): """Pull the three images and then scramble the MJD-OBS keywords""" import pyfits, MOPfits mjd=[] fid=[] fs=[] filenames=[] for expnum in expnums: if int(ccd)<18: cutout="[-*,-*]" else: cutout="[*,*]" filenames.append(MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)) fs.append(pyfits.open(filenames[-1])) mjd.append(fs[-1][0].header.get("MJD-OBS")) fid.append(fs[-1][0].header.get("EXPNUM")) if not os.access('weight.fits',os.F_OK): os.symlink(MOPfits.adGet("weight",extno=int(ccd),cutout=cutout),'weight.fits') for filename in filenames: if not os.access(filename,os.R_OK): sys.stderr.write("Ad Get Failed\n") raise TaskError, 'adGet Failed' order=[2,0,1] basenames=[] for i in range(len(fs)): fs[i][0].header.update("MJD-OBS",mjd[order[i]]); fs[i][0].header.update("EXPNUM",fid[order[i]]); filename=filenames[order[i]].replace(opt.raw,'s') basenames.append(os.path.splitext(filename)[0]) if os.access(filename,os.F_OK): os.unlink(filename) fs[i][0].writeto(filename,output_verify='ignore') os.unlink(filenames[order[i]]) return basenames
python
def scrambleTriples(expnums,ccd): """Pull the three images and then scramble the MJD-OBS keywords""" import pyfits, MOPfits mjd=[] fid=[] fs=[] filenames=[] for expnum in expnums: if int(ccd)<18: cutout="[-*,-*]" else: cutout="[*,*]" filenames.append(MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)) fs.append(pyfits.open(filenames[-1])) mjd.append(fs[-1][0].header.get("MJD-OBS")) fid.append(fs[-1][0].header.get("EXPNUM")) if not os.access('weight.fits',os.F_OK): os.symlink(MOPfits.adGet("weight",extno=int(ccd),cutout=cutout),'weight.fits') for filename in filenames: if not os.access(filename,os.R_OK): sys.stderr.write("Ad Get Failed\n") raise TaskError, 'adGet Failed' order=[2,0,1] basenames=[] for i in range(len(fs)): fs[i][0].header.update("MJD-OBS",mjd[order[i]]); fs[i][0].header.update("EXPNUM",fid[order[i]]); filename=filenames[order[i]].replace(opt.raw,'s') basenames.append(os.path.splitext(filename)[0]) if os.access(filename,os.F_OK): os.unlink(filename) fs[i][0].writeto(filename,output_verify='ignore') os.unlink(filenames[order[i]]) return basenames
[ "def", "scrambleTriples", "(", "expnums", ",", "ccd", ")", ":", "import", "pyfits", ",", "MOPfits", "mjd", "=", "[", "]", "fid", "=", "[", "]", "fs", "=", "[", "]", "filenames", "=", "[", "]", "for", "expnum", "in", "expnums", ":", "if", "int", "...
Pull the three images and then scramble the MJD-OBS keywords
[ "Pull", "the", "three", "images", "and", "then", "scramble", "the", "MJD", "-", "OBS", "keywords" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/scrample.py#L22-L59
OSSOS/MOP
src/jjk/preproc/scrample.py
getTriples
def getTriples(expnums,ccd): """Pull the three images and then scramble the MJD-OBS keywords""" import pyfits, MOPfits filenames=[] for expnum in expnums: if int(ccd)<18: cutout="[-*,-*]" else: cutout="[*,*]" filenames.append(MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)) for filename in filenames: if not os.access(filename,os.R_OK): sys.stderr.write("Ad Get Failed\n") raise TaskError, 'adGet Failed' if not os.access('weight.fits',os.F_OK): os.symlink(MOPfits.adGet("weight",extno=int(ccd),cutout=cutout),'weight.fits') basenames=[] for filename in filenames: filename=os.path.splitext(filename) basenames.append(filename[0]) print filenames, basenames return basenames
python
def getTriples(expnums,ccd): """Pull the three images and then scramble the MJD-OBS keywords""" import pyfits, MOPfits filenames=[] for expnum in expnums: if int(ccd)<18: cutout="[-*,-*]" else: cutout="[*,*]" filenames.append(MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)) for filename in filenames: if not os.access(filename,os.R_OK): sys.stderr.write("Ad Get Failed\n") raise TaskError, 'adGet Failed' if not os.access('weight.fits',os.F_OK): os.symlink(MOPfits.adGet("weight",extno=int(ccd),cutout=cutout),'weight.fits') basenames=[] for filename in filenames: filename=os.path.splitext(filename) basenames.append(filename[0]) print filenames, basenames return basenames
[ "def", "getTriples", "(", "expnums", ",", "ccd", ")", ":", "import", "pyfits", ",", "MOPfits", "filenames", "=", "[", "]", "for", "expnum", "in", "expnums", ":", "if", "int", "(", "ccd", ")", "<", "18", ":", "cutout", "=", "\"[-*,-*]\"", "else", ":",...
Pull the three images and then scramble the MJD-OBS keywords
[ "Pull", "the", "three", "images", "and", "then", "scramble", "the", "MJD", "-", "OBS", "keywords" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/scrample.py#L62-L87
OSSOS/MOP
src/jjk/preproc/scrample.py
searchTriples
def searchTriples(filenames,plant=False): """Given a list of exposure numbers, find all the KBOs in that set of exposures""" print filenames if opt.none : return import MOPfits,os import MOPdbaccess import string import os.path import pyfits if len(filenames)!=3: raise TaskError, "got %d exposures"%(len(expnums)) ### Some program Constants proc_these_files=[] if not plant: proc_these_files.append("# Files to be planted and searched\n") proc_these_files.append("# image fwhm plant\n") for filename in filenames: try: mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL') bucket=mysql.cursor() except: raise TaskError, "mysql failed" #bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) ) #row=bucket.fetchone() #mysql.close() #fwhm=row[0] #if not fwhm > 0: fwhm=1.0 if not plant: #proc_these_files.append("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no')) pstr='NO' else: pstr='YES' ### since we're planting we need a psf. JMPMAKEPSF will ### update the proc-these-files listing ### run the make psf script .. always. This creates proc-these-files ### which is needed by the find.pl script. 
command='jmpmakepsf.csh ./ %s %s' % ( filename, pstr ) if opt.verbose: sys.stderr.write( command ) try: os.system(command) except: raise TaskError, "jmpmakepsf noexec" if os.access(filename+'.jmpmakepsf.FAILED',os.R_OK) or not os.access(filename+".psf.fits", os.R_OK) : if plant: raise TaskError, "jmpmakepsf failed" # do without plant else: plant=False pstr='NO' ### we're not planting so, lets keep going ### but check that there is a line in proc_these_files add_line=True if not os.access('proc-these-files',os.R_OK): f=open('proc-these-files','w') for l in proc_these_files: f.write(l) f.close() f=open('proc-these-files','r') ptf_lines=f.readlines() f.close() for ptf_line in ptf_lines: if ptf_line[0]=='#': continue ptf_a=ptf_line.split() import re if re.search('%s' % (filename),ptf_a[0]): ### there's already a line for this one add_line=False break if add_line: f=open('proc-these-files','a') f.write("%s %f %s \n" % ( filename, fwhm/0.183, 'no')) f.close() if opt.none: return(-1) prefix='' if plant: command="plant.csh ./ " #command="plant.csh ./ -rmin %s -rmax %s -ang %s -width %s " % ( opt.rmin, opt.rmax, opt.angle, opt.width) try: os.system(command) except: raise TaskError, 'plant exec. 
failed' if not os.access('plant.OK',os.R_OK): raise TaskError, 'plant failed' prefix='fk' #else: # f=open('proc-these-files','w') # for line in proc_these_files: # f.write(line) # f.flush() # f.close() if opt.rerun and os.access('find.OK',os.R_OK): os.unlink("find.OK") #command="find.pl -p "+prefix+" -rn %s -rx %s -a %s -aw %s -d ./ " % ( opt.rmin, opt.rmax, opt.angle, opt.width) command="find.pl -p "+prefix+" -d ./ " if opt.union : command+=" -u" if opt.verbose: sys.stderr.write( command ) try: os.system(command) except: raise TaskErorr, "execute find" if not os.access("find.OK",os.R_OK): raise TaskError, "find failed" ### check the transformation file command = "checktrans -p "+prefix try: os.system(command) except: raise TaskError, "execute checktrans" if not os.access("checktrans.OK",os.R_OK): raise TaskError, "checktrans failed" elif os.access("checktrans.FAILED",os.R_OK): os.unlink("checktrans.FAILED") if os.access("BAD_TRANS"+prefix,os.R_OK): raise TaskError,"BAD TRANS" ## check that the transformation in .trans.jmp files look reasonable import math for filename in filenames: try: for line in open(filename+".trans.jmp"): for v in line.split(): if math.fabs(float(v)) > 200: raise TaskError,"BAD TRANS" except: raise TaskError, "TRAN_CHECK FAILED" astrom=prefix+filenames[0]+".cands.comb" if opt.plant: for filename in filenames: try: ushort(prefix+filename+".fits") except: raise TaskError("ushort failed %s" % (prefix+filename+".fits")) if opt.plant: astrom=prefix+filenames[0]+".comb.found" try: #make sure we have +5 lines in this file lines=file(astrom).readlines() if len(lines)<5: raise TaskError,"Too few Found" except: raise TaskError, "Error reading %s" %(astrom) if os.access(astrom,os.R_OK): return(1) else: return(0)
python
def searchTriples(filenames,plant=False): """Given a list of exposure numbers, find all the KBOs in that set of exposures""" print filenames if opt.none : return import MOPfits,os import MOPdbaccess import string import os.path import pyfits if len(filenames)!=3: raise TaskError, "got %d exposures"%(len(expnums)) ### Some program Constants proc_these_files=[] if not plant: proc_these_files.append("# Files to be planted and searched\n") proc_these_files.append("# image fwhm plant\n") for filename in filenames: try: mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL') bucket=mysql.cursor() except: raise TaskError, "mysql failed" #bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) ) #row=bucket.fetchone() #mysql.close() #fwhm=row[0] #if not fwhm > 0: fwhm=1.0 if not plant: #proc_these_files.append("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no')) pstr='NO' else: pstr='YES' ### since we're planting we need a psf. JMPMAKEPSF will ### update the proc-these-files listing ### run the make psf script .. always. This creates proc-these-files ### which is needed by the find.pl script. 
command='jmpmakepsf.csh ./ %s %s' % ( filename, pstr ) if opt.verbose: sys.stderr.write( command ) try: os.system(command) except: raise TaskError, "jmpmakepsf noexec" if os.access(filename+'.jmpmakepsf.FAILED',os.R_OK) or not os.access(filename+".psf.fits", os.R_OK) : if plant: raise TaskError, "jmpmakepsf failed" # do without plant else: plant=False pstr='NO' ### we're not planting so, lets keep going ### but check that there is a line in proc_these_files add_line=True if not os.access('proc-these-files',os.R_OK): f=open('proc-these-files','w') for l in proc_these_files: f.write(l) f.close() f=open('proc-these-files','r') ptf_lines=f.readlines() f.close() for ptf_line in ptf_lines: if ptf_line[0]=='#': continue ptf_a=ptf_line.split() import re if re.search('%s' % (filename),ptf_a[0]): ### there's already a line for this one add_line=False break if add_line: f=open('proc-these-files','a') f.write("%s %f %s \n" % ( filename, fwhm/0.183, 'no')) f.close() if opt.none: return(-1) prefix='' if plant: command="plant.csh ./ " #command="plant.csh ./ -rmin %s -rmax %s -ang %s -width %s " % ( opt.rmin, opt.rmax, opt.angle, opt.width) try: os.system(command) except: raise TaskError, 'plant exec. 
failed' if not os.access('plant.OK',os.R_OK): raise TaskError, 'plant failed' prefix='fk' #else: # f=open('proc-these-files','w') # for line in proc_these_files: # f.write(line) # f.flush() # f.close() if opt.rerun and os.access('find.OK',os.R_OK): os.unlink("find.OK") #command="find.pl -p "+prefix+" -rn %s -rx %s -a %s -aw %s -d ./ " % ( opt.rmin, opt.rmax, opt.angle, opt.width) command="find.pl -p "+prefix+" -d ./ " if opt.union : command+=" -u" if opt.verbose: sys.stderr.write( command ) try: os.system(command) except: raise TaskErorr, "execute find" if not os.access("find.OK",os.R_OK): raise TaskError, "find failed" ### check the transformation file command = "checktrans -p "+prefix try: os.system(command) except: raise TaskError, "execute checktrans" if not os.access("checktrans.OK",os.R_OK): raise TaskError, "checktrans failed" elif os.access("checktrans.FAILED",os.R_OK): os.unlink("checktrans.FAILED") if os.access("BAD_TRANS"+prefix,os.R_OK): raise TaskError,"BAD TRANS" ## check that the transformation in .trans.jmp files look reasonable import math for filename in filenames: try: for line in open(filename+".trans.jmp"): for v in line.split(): if math.fabs(float(v)) > 200: raise TaskError,"BAD TRANS" except: raise TaskError, "TRAN_CHECK FAILED" astrom=prefix+filenames[0]+".cands.comb" if opt.plant: for filename in filenames: try: ushort(prefix+filename+".fits") except: raise TaskError("ushort failed %s" % (prefix+filename+".fits")) if opt.plant: astrom=prefix+filenames[0]+".comb.found" try: #make sure we have +5 lines in this file lines=file(astrom).readlines() if len(lines)<5: raise TaskError,"Too few Found" except: raise TaskError, "Error reading %s" %(astrom) if os.access(astrom,os.R_OK): return(1) else: return(0)
[ "def", "searchTriples", "(", "filenames", ",", "plant", "=", "False", ")", ":", "print", "filenames", "if", "opt", ".", "none", ":", "return", "import", "MOPfits", ",", "os", "import", "MOPdbaccess", "import", "string", "import", "os", ".", "path", "import...
Given a list of exposure numbers, find all the KBOs in that set of exposures
[ "Given", "a", "list", "of", "exposure", "numbers", "find", "all", "the", "KBOs", "in", "that", "set", "of", "exposures" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/scrample.py#L89-L266
OSSOS/MOP
src/jjk/preproc/scrample.py
ushort
def ushort(filename): """Ushort a the pixels""" import pyfits f=pyfits.open(filename,mode='update') f[0].scale('int16','',bzero=32768) f.flush() f.close()
python
def ushort(filename): """Ushort a the pixels""" import pyfits f=pyfits.open(filename,mode='update') f[0].scale('int16','',bzero=32768) f.flush() f.close()
[ "def", "ushort", "(", "filename", ")", ":", "import", "pyfits", "f", "=", "pyfits", ".", "open", "(", "filename", ",", "mode", "=", "'update'", ")", "f", "[", "0", "]", ".", "scale", "(", "'int16'", ",", "''", ",", "bzero", "=", "32768", ")", "f"...
Ushort a the pixels
[ "Ushort", "a", "the", "pixels" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/scrample.py#L268-L274
OSSOS/MOP
src/ossos/core/ossos/astrom.py
AstromParser.parse
def parse(self, filename): """ Parses a file into an AstromData structure. Args: filename: str The name of the file whose contents will be parsed. Returns: data: AstromData The file contents extracted into a data structure for programmatic access. """ filehandle = storage.open_vos_or_local(filename, "rb") assert filehandle is not None, "Failed to open file {} ".format(filename) filestr = filehandle.read() filehandle.close() assert filestr is not None, "File contents are None" observations = self._parse_observation_list(filestr) self._parse_observation_headers(filestr, observations) sys_header = self._parse_system_header(filestr) sources = self._parse_source_data(filestr, observations) return AstromData(observations, sys_header, sources, discovery_only=self.discovery_only)
python
def parse(self, filename): """ Parses a file into an AstromData structure. Args: filename: str The name of the file whose contents will be parsed. Returns: data: AstromData The file contents extracted into a data structure for programmatic access. """ filehandle = storage.open_vos_or_local(filename, "rb") assert filehandle is not None, "Failed to open file {} ".format(filename) filestr = filehandle.read() filehandle.close() assert filestr is not None, "File contents are None" observations = self._parse_observation_list(filestr) self._parse_observation_headers(filestr, observations) sys_header = self._parse_system_header(filestr) sources = self._parse_source_data(filestr, observations) return AstromData(observations, sys_header, sources, discovery_only=self.discovery_only)
[ "def", "parse", "(", "self", ",", "filename", ")", ":", "filehandle", "=", "storage", ".", "open_vos_or_local", "(", "filename", ",", "\"rb\"", ")", "assert", "filehandle", "is", "not", "None", ",", "\"Failed to open file {} \"", ".", "format", "(", "filename"...
Parses a file into an AstromData structure. Args: filename: str The name of the file whose contents will be parsed. Returns: data: AstromData The file contents extracted into a data structure for programmatic access.
[ "Parses", "a", "file", "into", "an", "AstromData", "structure", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L209-L237
OSSOS/MOP
src/ossos/core/ossos/astrom.py
BaseAstromWriter._write_observation_headers
def _write_observation_headers(self, observations): """ See src/pipematt/step1matt-c """ for observation in observations: header = observation.header def get_header_vals(header_list): header_vals = [] for key in header_list: val = header[key] if key == MJD_OBS_CENTER: header_vals.append(val) elif key == DETECTOR: header_vals.append(val.ljust(20)) else: header_vals.append(float(val)) return tuple(header_vals) self._write_line("## MOPversion") self._write_line("# %s" % header[MOPVERSION]) self._write_line("## MJD-OBS-CENTER EXPTIME THRES FWHM MAXCOUNT CRVAL1 CRVAL2 EXPNUM") self._write_line("# %s%8.2f%6.2f%6.2f%9.1f%11.5f%11.5f%9d" % get_header_vals( [MJD_OBS_CENTER, EXPTIME, THRES, FWHM, MAXCOUNT, CRVAL1, CRVAL2, EXPNUM])) self._write_line("## SCALE CHIP CRPIX1 CRPIX2 NAX1 NAX2 DETECTOR PHADU RDNOIS") self._write_line("# %6.3f%4d%10.2f%10.2f%6d%6d %s%5.2f %5.2f" % get_header_vals( [SCALE, CHIP, CRPIX1, CRPIX2, NAX1, NAX2, DETECTOR, PHADU, RDNOIS]))
python
def _write_observation_headers(self, observations): """ See src/pipematt/step1matt-c """ for observation in observations: header = observation.header def get_header_vals(header_list): header_vals = [] for key in header_list: val = header[key] if key == MJD_OBS_CENTER: header_vals.append(val) elif key == DETECTOR: header_vals.append(val.ljust(20)) else: header_vals.append(float(val)) return tuple(header_vals) self._write_line("## MOPversion") self._write_line("# %s" % header[MOPVERSION]) self._write_line("## MJD-OBS-CENTER EXPTIME THRES FWHM MAXCOUNT CRVAL1 CRVAL2 EXPNUM") self._write_line("# %s%8.2f%6.2f%6.2f%9.1f%11.5f%11.5f%9d" % get_header_vals( [MJD_OBS_CENTER, EXPTIME, THRES, FWHM, MAXCOUNT, CRVAL1, CRVAL2, EXPNUM])) self._write_line("## SCALE CHIP CRPIX1 CRPIX2 NAX1 NAX2 DETECTOR PHADU RDNOIS") self._write_line("# %6.3f%4d%10.2f%10.2f%6d%6d %s%5.2f %5.2f" % get_header_vals( [SCALE, CHIP, CRPIX1, CRPIX2, NAX1, NAX2, DETECTOR, PHADU, RDNOIS]))
[ "def", "_write_observation_headers", "(", "self", ",", "observations", ")", ":", "for", "observation", "in", "observations", ":", "header", "=", "observation", ".", "header", "def", "get_header_vals", "(", "header_list", ")", ":", "header_vals", "=", "[", "]", ...
See src/pipematt/step1matt-c
[ "See", "src", "/", "pipematt", "/", "step1matt", "-", "c" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L345-L373
OSSOS/MOP
src/ossos/core/ossos/astrom.py
BaseAstromWriter._write_sys_header
def _write_sys_header(self, sys_header): """ See src/pipematt/step3matt-c """ header_vals = [sys_header[RMIN], sys_header[RMAX], sys_header[ANGLE], sys_header[AWIDTH]] self._write_line("## RMIN RMAX ANGLE AWIDTH") self._write_line("# %8.1f%8.1f%8.1f%8.1f" % tuple(map(float, header_vals)))
python
def _write_sys_header(self, sys_header): """ See src/pipematt/step3matt-c """ header_vals = [sys_header[RMIN], sys_header[RMAX], sys_header[ANGLE], sys_header[AWIDTH]] self._write_line("## RMIN RMAX ANGLE AWIDTH") self._write_line("# %8.1f%8.1f%8.1f%8.1f" % tuple(map(float, header_vals)))
[ "def", "_write_sys_header", "(", "self", ",", "sys_header", ")", ":", "header_vals", "=", "[", "sys_header", "[", "RMIN", "]", ",", "sys_header", "[", "RMAX", "]", ",", "sys_header", "[", "ANGLE", "]", ",", "sys_header", "[", "AWIDTH", "]", "]", "self", ...
See src/pipematt/step3matt-c
[ "See", "src", "/", "pipematt", "/", "step3matt", "-", "c" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L375-L382
OSSOS/MOP
src/ossos/core/ossos/astrom.py
BaseAstromWriter._write_source_data
def _write_source_data(self, sources): """ See src/jjk/measure3 """ for i, source in enumerate(sources): self._write_source(source)
python
def _write_source_data(self, sources): """ See src/jjk/measure3 """ for i, source in enumerate(sources): self._write_source(source)
[ "def", "_write_source_data", "(", "self", ",", "sources", ")", ":", "for", "i", ",", "source", "in", "enumerate", "(", "sources", ")", ":", "self", ".", "_write_source", "(", "source", ")" ]
See src/jjk/measure3
[ "See", "src", "/", "jjk", "/", "measure3" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L384-L389
OSSOS/MOP
src/ossos/core/ossos/astrom.py
BaseAstromWriter.write_headers
def write_headers(self, observations, sys_header): """ Writes the header part of the astrom file so that only the source data has to be filled in. """ if self._header_written: raise AstromFormatError("Astrom file already has headers.") self._write_observation_list(observations) self._write_observation_headers(observations) self._write_sys_header(sys_header) self._write_source_header() self._header_written = True
python
def write_headers(self, observations, sys_header): """ Writes the header part of the astrom file so that only the source data has to be filled in. """ if self._header_written: raise AstromFormatError("Astrom file already has headers.") self._write_observation_list(observations) self._write_observation_headers(observations) self._write_sys_header(sys_header) self._write_source_header() self._header_written = True
[ "def", "write_headers", "(", "self", ",", "observations", ",", "sys_header", ")", ":", "if", "self", ".", "_header_written", ":", "raise", "AstromFormatError", "(", "\"Astrom file already has headers.\"", ")", "self", ".", "_write_observation_list", "(", "observations...
Writes the header part of the astrom file so that only the source data has to be filled in.
[ "Writes", "the", "header", "part", "of", "the", "astrom", "file", "so", "that", "only", "the", "source", "data", "has", "to", "be", "filled", "in", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L402-L415
OSSOS/MOP
src/ossos/core/ossos/astrom.py
StreamingAstromWriter.write_source
def write_source(self, source): """ Writes out data for a single source. """ if not self._header_written: observations = [reading.get_observation() for reading in source.get_readings()] self.write_headers(observations, self.sys_header) self._write_source(source)
python
def write_source(self, source): """ Writes out data for a single source. """ if not self._header_written: observations = [reading.get_observation() for reading in source.get_readings()] self.write_headers(observations, self.sys_header) self._write_source(source)
[ "def", "write_source", "(", "self", ",", "source", ")", ":", "if", "not", "self", ".", "_header_written", ":", "observations", "=", "[", "reading", ".", "get_observation", "(", ")", "for", "reading", "in", "source", ".", "get_readings", "(", ")", "]", "s...
Writes out data for a single source.
[ "Writes", "out", "data", "for", "a", "single", "source", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L441-L449
OSSOS/MOP
src/ossos/core/ossos/astrom.py
BulkAstromWriter.write_astrom_data
def write_astrom_data(self, astrom_data): """ Writes a full AstromData structure at once. """ self.write_headers(astrom_data.observations, astrom_data.sys_header) self._write_source_data(astrom_data.sources)
python
def write_astrom_data(self, astrom_data): """ Writes a full AstromData structure at once. """ self.write_headers(astrom_data.observations, astrom_data.sys_header) self._write_source_data(astrom_data.sources)
[ "def", "write_astrom_data", "(", "self", ",", "astrom_data", ")", ":", "self", ".", "write_headers", "(", "astrom_data", ".", "observations", ",", "astrom_data", ".", "sys_header", ")", "self", ".", "_write_source_data", "(", "astrom_data", ".", "sources", ")" ]
Writes a full AstromData structure at once.
[ "Writes", "a", "full", "AstromData", "structure", "at", "once", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L461-L466
OSSOS/MOP
src/ossos/core/ossos/astrom.py
SourceReading._original_frame
def _original_frame(self, x, y): """ Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float """ if self._inverted: return self.obs.naxis1 - x, self.obs.naxis2 - y return x, y
python
def _original_frame(self, x, y): """ Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float """ if self._inverted: return self.obs.naxis1 - x, self.obs.naxis2 - y return x, y
[ "def", "_original_frame", "(", "self", ",", "x", ",", "y", ")", ":", "if", "self", ".", "_inverted", ":", "return", "self", ".", "obs", ".", "naxis1", "-", "x", ",", "self", ".", "obs", ".", "naxis2", "-", "y", "return", "x", ",", "y" ]
Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float
[ "Return", "x", "/", "y", "in", "the", "original", "frame", "based", "on", "a", "guess", "as", "much", "as", "anything", ".", ":", "param", "x", ":", "x", "pixel", "coordinate", ":", "type", "x", ":", "float", ":", "param", "y", ":", "y", "pixel", ...
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L675-L687
OSSOS/MOP
src/ossos/core/ossos/astrom.py
SourceReading.pix_coord
def pix_coord(self, pix_coord): """ :type pix_coord: list :param pix_coord: an x,y pixel coordinate, origin = 1 """ try: pix_coord = list(pix_coord) except: pass if not isinstance(pix_coord, list) or len(pix_coord) != 2: raise ValueError("pix_coord needs to be set with an (x,y) coordinate pair, got {}".format(pix_coord)) x, y = pix_coord if not isinstance(x, Quantity): x = float(x) * units.pix if not isinstance(y, Quantity): y = float(y) * units.pix self._pix_coord = x, y
python
def pix_coord(self, pix_coord): """ :type pix_coord: list :param pix_coord: an x,y pixel coordinate, origin = 1 """ try: pix_coord = list(pix_coord) except: pass if not isinstance(pix_coord, list) or len(pix_coord) != 2: raise ValueError("pix_coord needs to be set with an (x,y) coordinate pair, got {}".format(pix_coord)) x, y = pix_coord if not isinstance(x, Quantity): x = float(x) * units.pix if not isinstance(y, Quantity): y = float(y) * units.pix self._pix_coord = x, y
[ "def", "pix_coord", "(", "self", ",", "pix_coord", ")", ":", "try", ":", "pix_coord", "=", "list", "(", "pix_coord", ")", "except", ":", "pass", "if", "not", "isinstance", "(", "pix_coord", ",", "list", ")", "or", "len", "(", "pix_coord", ")", "!=", ...
:type pix_coord: list :param pix_coord: an x,y pixel coordinate, origin = 1
[ ":", "type", "pix_coord", ":", "list", ":", "param", "pix_coord", ":", "an", "x", "y", "pixel", "coordinate", "origin", "=", "1" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L725-L741
OSSOS/MOP
src/ossos/core/ossos/astrom.py
SourceReading.reference_source_point
def reference_source_point(self): """ The location of the source in the reference image, in terms of the current image coordinates. """ xref = isinstance(self.xref, Quantity) and self.xref.value or self.xref yref = isinstance(self.yref, Quantity) and self.yref.value or self.yref return xref + self.x_ref_offset, yref + self.y_ref_offset
python
def reference_source_point(self): """ The location of the source in the reference image, in terms of the current image coordinates. """ xref = isinstance(self.xref, Quantity) and self.xref.value or self.xref yref = isinstance(self.yref, Quantity) and self.yref.value or self.yref return xref + self.x_ref_offset, yref + self.y_ref_offset
[ "def", "reference_source_point", "(", "self", ")", ":", "xref", "=", "isinstance", "(", "self", ".", "xref", ",", "Quantity", ")", "and", "self", ".", "xref", ".", "value", "or", "self", ".", "xref", "yref", "=", "isinstance", "(", "self", ".", "yref",...
The location of the source in the reference image, in terms of the current image coordinates.
[ "The", "location", "of", "the", "source", "in", "the", "reference", "image", "in", "terms", "of", "the", "current", "image", "coordinates", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L871-L879
OSSOS/MOP
src/ossos/core/ossos/astrom.py
SourceReading.get_coordinate_offset
def get_coordinate_offset(self, other_reading): """ Calculates the offsets between readings' coordinate systems. Args: other_reading: ossos.astrom.SourceReading The reading to compare coordinate systems with. Returns: (offset_x, offset_y): The x and y offsets between this reading and the other reading's coordinate systems. """ my_x, my_y = self.reference_source_point other_x, other_y = other_reading.reference_source_point return my_x - other_x, my_y - other_y
python
def get_coordinate_offset(self, other_reading): """ Calculates the offsets between readings' coordinate systems. Args: other_reading: ossos.astrom.SourceReading The reading to compare coordinate systems with. Returns: (offset_x, offset_y): The x and y offsets between this reading and the other reading's coordinate systems. """ my_x, my_y = self.reference_source_point other_x, other_y = other_reading.reference_source_point return my_x - other_x, my_y - other_y
[ "def", "get_coordinate_offset", "(", "self", ",", "other_reading", ")", ":", "my_x", ",", "my_y", "=", "self", ".", "reference_source_point", "other_x", ",", "other_y", "=", "other_reading", ".", "reference_source_point", "return", "my_x", "-", "other_x", ",", "...
Calculates the offsets between readings' coordinate systems. Args: other_reading: ossos.astrom.SourceReading The reading to compare coordinate systems with. Returns: (offset_x, offset_y): The x and y offsets between this reading and the other reading's coordinate systems.
[ "Calculates", "the", "offsets", "between", "readings", "coordinate", "systems", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L894-L909
OSSOS/MOP
src/ossos/core/ossos/astrom.py
SourceReading.compute_inverted
def compute_inverted(self): """ Returns: inverted: bool True if the stored image is inverted. """ # astheader = storage.get_astheader(self.obs.expnum, self.obs.ccdnum, version=self.obs.ftype) # pvwcs = wcs.WCS(astheader) # (x, y) = pvwcs.sky2xy(self.ra, self.dec) # logger.debug("is_inverted: X,Y {},{} -> wcs X,Y {},{}".format(self.x, self.y, x, y)) # dr2 = ((x-self.x)**2 + (y-self.y)**2) # return dr2 > 2 if self.ssos or self.obs.is_fake() or self.obs.ftype == 's': inverted = False else: inverted = True if self.get_ccd_num() - 1 in INVERTED_CCDS else False logger.debug("Got that {} is_inverted: {}".format(self.obs.rawname, inverted)) return inverted
python
def compute_inverted(self): """ Returns: inverted: bool True if the stored image is inverted. """ # astheader = storage.get_astheader(self.obs.expnum, self.obs.ccdnum, version=self.obs.ftype) # pvwcs = wcs.WCS(astheader) # (x, y) = pvwcs.sky2xy(self.ra, self.dec) # logger.debug("is_inverted: X,Y {},{} -> wcs X,Y {},{}".format(self.x, self.y, x, y)) # dr2 = ((x-self.x)**2 + (y-self.y)**2) # return dr2 > 2 if self.ssos or self.obs.is_fake() or self.obs.ftype == 's': inverted = False else: inverted = True if self.get_ccd_num() - 1 in INVERTED_CCDS else False logger.debug("Got that {} is_inverted: {}".format(self.obs.rawname, inverted)) return inverted
[ "def", "compute_inverted", "(", "self", ")", ":", "# astheader = storage.get_astheader(self.obs.expnum, self.obs.ccdnum, version=self.obs.ftype)", "# pvwcs = wcs.WCS(astheader)", "# (x, y) = pvwcs.sky2xy(self.ra, self.dec)", "# logger.debug(\"is_inverted: X,Y {},{} -> wcs X,Y {},{}\".format(self....
Returns: inverted: bool True if the stored image is inverted.
[ "Returns", ":", "inverted", ":", "bool", "True", "if", "the", "stored", "image", "is", "inverted", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L953-L971
OSSOS/MOP
src/ossos/core/ossos/astrom.py
Observation.from_source_reference
def from_source_reference(expnum, ccd, x, y): """ Given the location of a source in the image, create an Observation. """ image_uri = storage.dbimages_uri(expnum=expnum, ccd=None, version='p', ext='.fits', subdir=None) logger.debug('Trying to access {}'.format(image_uri)) if not storage.exists(image_uri, force=False): logger.warning('Image not in dbimages? Trying subdir.') image_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p') if not storage.exists(image_uri, force=False): logger.warning("Image doesn't exist in ccd subdir. %s" % image_uri) return None if x == -9999 or y == -9999: logger.warning("Skipping {} as x/y not resolved.".format(image_uri)) return None mopheader_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p', ext='.mopheader') if not storage.exists(mopheader_uri, force=False): # ELEVATE! we need to know to go off and reprocess/include this image. logger.critical('Image exists but processing incomplete. Mopheader missing. {}'.format(image_uri)) return None # Build astrom.Observation observation = Observation(expnum=str(expnum), ftype='p', ccdnum=str(ccd), fk="") # JJK commented this out, I think the following line is not true? # observation.rawname = os.path.splitext(os.path.basename(image_uri))[0]+str(ccd).zfill(2) return observation
python
def from_source_reference(expnum, ccd, x, y): """ Given the location of a source in the image, create an Observation. """ image_uri = storage.dbimages_uri(expnum=expnum, ccd=None, version='p', ext='.fits', subdir=None) logger.debug('Trying to access {}'.format(image_uri)) if not storage.exists(image_uri, force=False): logger.warning('Image not in dbimages? Trying subdir.') image_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p') if not storage.exists(image_uri, force=False): logger.warning("Image doesn't exist in ccd subdir. %s" % image_uri) return None if x == -9999 or y == -9999: logger.warning("Skipping {} as x/y not resolved.".format(image_uri)) return None mopheader_uri = storage.dbimages_uri(expnum=expnum, ccd=ccd, version='p', ext='.mopheader') if not storage.exists(mopheader_uri, force=False): # ELEVATE! we need to know to go off and reprocess/include this image. logger.critical('Image exists but processing incomplete. Mopheader missing. {}'.format(image_uri)) return None # Build astrom.Observation observation = Observation(expnum=str(expnum), ftype='p', ccdnum=str(ccd), fk="") # JJK commented this out, I think the following line is not true? # observation.rawname = os.path.splitext(os.path.basename(image_uri))[0]+str(ccd).zfill(2) return observation
[ "def", "from_source_reference", "(", "expnum", ",", "ccd", ",", "x", ",", "y", ")", ":", "image_uri", "=", "storage", ".", "dbimages_uri", "(", "expnum", "=", "expnum", ",", "ccd", "=", "None", ",", "version", "=", "'p'", ",", "ext", "=", "'.fits'", ...
Given the location of a source in the image, create an Observation.
[ "Given", "the", "location", "of", "a", "source", "in", "the", "image", "create", "an", "Observation", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L990-L1034
OSSOS/MOP
src/ossos/core/ossos/astrom.py
StreamingVettingWriter.write_source
def write_source(self, source, comment=None, reject=False): """ Writes out data for a single source. """ if not self._header_written: observations = [reading.get_observation() for reading in source.get_readings()] self.write_headers(observations, self.sys_header) self._write_source(source, comment=comment, reject=reject)
python
def write_source(self, source, comment=None, reject=False): """ Writes out data for a single source. """ if not self._header_written: observations = [reading.get_observation() for reading in source.get_readings()] self.write_headers(observations, self.sys_header) self._write_source(source, comment=comment, reject=reject)
[ "def", "write_source", "(", "self", ",", "source", ",", "comment", "=", "None", ",", "reject", "=", "False", ")", ":", "if", "not", "self", ".", "_header_written", ":", "observations", "=", "[", "reading", ".", "get_observation", "(", ")", "for", "readin...
Writes out data for a single source.
[ "Writes", "out", "data", "for", "a", "single", "source", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/astrom.py#L1160-L1168
OSSOS/MOP
src/ossos/core/ossos/coord.py
mjd2gmst
def mjd2gmst(mjd): """Convert Modfied Juian Date (JD = 2400000.5) to GMST Taken from P.T. Walace routines. """ tu = (mjd - MJD0) / (100*DPY) st = math.fmod(mjd, 1.0) * D2PI + (24110.54841 + (8640184.812866 + (0.093104 - 6.2e-6 * tu) * tu) * tu) * DS2R w = math.fmod(st, D2PI) if w >= 0.0: return w else: return w + D2PI
python
def mjd2gmst(mjd): """Convert Modfied Juian Date (JD = 2400000.5) to GMST Taken from P.T. Walace routines. """ tu = (mjd - MJD0) / (100*DPY) st = math.fmod(mjd, 1.0) * D2PI + (24110.54841 + (8640184.812866 + (0.093104 - 6.2e-6 * tu) * tu) * tu) * DS2R w = math.fmod(st, D2PI) if w >= 0.0: return w else: return w + D2PI
[ "def", "mjd2gmst", "(", "mjd", ")", ":", "tu", "=", "(", "mjd", "-", "MJD0", ")", "/", "(", "100", "*", "DPY", ")", "st", "=", "math", ".", "fmod", "(", "mjd", ",", "1.0", ")", "*", "D2PI", "+", "(", "24110.54841", "+", "(", "8640184.812866", ...
Convert Modfied Juian Date (JD = 2400000.5) to GMST Taken from P.T. Walace routines.
[ "Convert", "Modfied", "Juian", "Date", "(", "JD", "=", "2400000", ".", "5", ")", "to", "GMST", "Taken", "from", "P", ".", "T", ".", "Walace", "routines", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/coord.py#L12-L27
JohnVinyard/zounds
zounds/learn/wgan.py
WassersteinGanTrainer._gradient_penalty
def _gradient_penalty(self, real_samples, fake_samples, kwargs): """ Compute the norm of the gradients for each sample in a batch, and penalize anything on either side of unit norm """ import torch from torch.autograd import Variable, grad real_samples = real_samples.view(fake_samples.shape) subset_size = real_samples.shape[0] real_samples = real_samples[:subset_size] fake_samples = fake_samples[:subset_size] alpha = torch.rand(subset_size) if self.use_cuda: alpha = alpha.cuda() alpha = alpha.view((-1,) + ((1,) * (real_samples.dim() - 1))) interpolates = alpha * real_samples + ((1 - alpha) * fake_samples) interpolates = Variable(interpolates, requires_grad=True) if self.use_cuda: interpolates = interpolates.cuda() d_output = self.critic(interpolates, **kwargs) grad_ouputs = torch.ones(d_output.size()) if self.use_cuda: grad_ouputs = grad_ouputs.cuda() gradients = grad( outputs=d_output, inputs=interpolates, grad_outputs=grad_ouputs, create_graph=True, retain_graph=True, only_inputs=True)[0] return ((gradients.norm(2, dim=1) - 1) ** 2).mean() * 10
python
def _gradient_penalty(self, real_samples, fake_samples, kwargs): """ Compute the norm of the gradients for each sample in a batch, and penalize anything on either side of unit norm """ import torch from torch.autograd import Variable, grad real_samples = real_samples.view(fake_samples.shape) subset_size = real_samples.shape[0] real_samples = real_samples[:subset_size] fake_samples = fake_samples[:subset_size] alpha = torch.rand(subset_size) if self.use_cuda: alpha = alpha.cuda() alpha = alpha.view((-1,) + ((1,) * (real_samples.dim() - 1))) interpolates = alpha * real_samples + ((1 - alpha) * fake_samples) interpolates = Variable(interpolates, requires_grad=True) if self.use_cuda: interpolates = interpolates.cuda() d_output = self.critic(interpolates, **kwargs) grad_ouputs = torch.ones(d_output.size()) if self.use_cuda: grad_ouputs = grad_ouputs.cuda() gradients = grad( outputs=d_output, inputs=interpolates, grad_outputs=grad_ouputs, create_graph=True, retain_graph=True, only_inputs=True)[0] return ((gradients.norm(2, dim=1) - 1) ** 2).mean() * 10
[ "def", "_gradient_penalty", "(", "self", ",", "real_samples", ",", "fake_samples", ",", "kwargs", ")", ":", "import", "torch", "from", "torch", ".", "autograd", "import", "Variable", ",", "grad", "real_samples", "=", "real_samples", ".", "view", "(", "fake_sam...
Compute the norm of the gradients for each sample in a batch, and penalize anything on either side of unit norm
[ "Compute", "the", "norm", "of", "the", "gradients", "for", "each", "sample", "in", "a", "batch", "and", "penalize", "anything", "on", "either", "side", "of", "unit", "norm" ]
train
https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/learn/wgan.py#L61-L99
OSSOS/MOP
src/ossos/core/ossos/pipeline/update_astrometry.py
remeasure
def remeasure(mpc_in, reset_pixel_coordinates=True): """ Compute the RA/DEC of the line based on the X/Y in the comment and the WCS of the associated image. Comment of supplied astrometric line (mpc_in) must be in OSSOSComment format. @param mpc_in: An line of astrometric measurement to recompute the RA/DEC from the X/Y in the comment. @type mpc_in: mp_ephem.Observation @param reset_pixel_coordinates: try and determine correct X/Y is X/Y doesn't map to correct RA/DEC value @type reset_pixel_coordinates: bool @type reset_pixecl_coordinates: bool """ if mpc_in.null_observation: return mpc_in mpc_obs = deepcopy(mpc_in) logging.debug("rm start: {}".format(mpc_obs.to_string())) if not isinstance(mpc_obs.comment, mp_ephem.ephem.OSSOSComment): logging.error("Failed to convert comment line") return mpc_in parts = re.search('(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)', str(mpc_obs.comment.frame)) if not parts: logging.error("Failed to parse expnum from frame info in comment line") return mpc_in ccd = int(parts.group('ccd')) expnum = int(parts.group('expnum')) exp_type = parts.group('type') try: header = _connection_error_wrapper(storage._get_sghead, expnum)[ccd+1] except IOError as ioerr: logging.error(str(ioerr)) logging.error("Failed to get astrometric header for: {}".format(mpc_obs)) return mpc_in this_wcs = wcs.WCS(header) coordinate = this_wcs.xy2sky(mpc_obs.comment.x, mpc_obs.comment.y, usepv=True) mpc_obs.coordinate = coordinate[0].to('degree').value, coordinate[1].to('degree').value sep = mpc_in.coordinate.separation(mpc_obs.coordinate) if sep > TOLERANCE*20 and mpc_in.discovery and _flipped_ccd(ccd): logging.warn("Large ({}) offset using X/Y in comment line to compute RA/DEC".format(sep)) if reset_pixel_coordinates: logging.info("flipping/flopping the discvoery x/y position recorded.") x = header['NAXIS1'] - mpc_obs.comment.x + 1 y = header['NAXIS2'] - mpc_obs.comment.y + 1 new_coordinate = this_wcs.xy2sky(x, y, usepv=True) new_sep = 
mpc_in.coordinate.separation(new_coordinate) if new_sep < TOLERANCE*2: mpc_obs.coordinate = new_coordinate mpc_obs.comment.x = x mpc_obs.comment.y = y sep = new_sep if sep > TOLERANCE: # use the old header RA/DEC to predict the X/Y and then use that X/Y to get new RA/DEC logging.warn("sep: {} --> large offset when using comment line X/Y to compute RA/DEC") if reset_pixel_coordinates: logging.warn("Using RA/DEC and original WCS to compute X/Y and replacing X/Y in comment.".format(sep)) header2 = _connection_error_wrapper(storage.get_astheader, expnum, ccd) image_wcs = wcs.WCS(header2) (x, y) = image_wcs.sky2xy(mpc_in.coordinate.ra.degree, mpc_in.coordinate.dec.degree, usepv=False) mpc_obs.coordinate = this_wcs.xy2sky(x, y, usepv=True) mpc_obs.comment.x = x mpc_obs.comment.y = y logging.info("Coordinate changed: ({:5.2f},{:5.2f}) --> ({:5.2f},{:5.2f})".format(mpc_obs.comment.x, mpc_obs.comment.y, x, y)) if mpc_obs.comment.mag_uncertainty is not None: try: merr = float(mpc_obs.comment.mag_uncertainty) fwhm = float(_connection_error_wrapper(storage.get_fwhm, expnum, ccd)) centroid_err = merr * fwhm * header['PIXSCAL1'] logging.debug("Centroid uncertainty: {} {} => {}".format(merr, fwhm, centroid_err)) except Exception as err: logging.error(str(err)) logging.error("Failed to compute centroid_err for observation:\n" "{}\nUsing default of 0.2".format(mpc_obs.to_string())) centroid_err = 0.2 else: centroid_err = 0.2 mpc_obs.comment.astrometric_level = header.get('ASTLEVEL', "0") try: asterr = float(header['ASTERR']) residuals = (asterr ** 2 + centroid_err ** 2) ** 0.5 logging.debug("Residuals: {} {} => {}".format(asterr, centroid_err, residuals)) except Exception as err: logging.error(str(err)) logging.error("Failed while trying to compute plate uncertainty for\n{}".format(mpc_obs.to_string())) logging.error('Using default of 0.25') residuals = 0.25 mpc_obs.comment.plate_uncertainty = residuals logging.debug("sending back: {}".format(mpc_obs.to_string())) return mpc_obs
python
def remeasure(mpc_in, reset_pixel_coordinates=True): """ Compute the RA/DEC of the line based on the X/Y in the comment and the WCS of the associated image. Comment of supplied astrometric line (mpc_in) must be in OSSOSComment format. @param mpc_in: An line of astrometric measurement to recompute the RA/DEC from the X/Y in the comment. @type mpc_in: mp_ephem.Observation @param reset_pixel_coordinates: try and determine correct X/Y is X/Y doesn't map to correct RA/DEC value @type reset_pixel_coordinates: bool @type reset_pixecl_coordinates: bool """ if mpc_in.null_observation: return mpc_in mpc_obs = deepcopy(mpc_in) logging.debug("rm start: {}".format(mpc_obs.to_string())) if not isinstance(mpc_obs.comment, mp_ephem.ephem.OSSOSComment): logging.error("Failed to convert comment line") return mpc_in parts = re.search('(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)', str(mpc_obs.comment.frame)) if not parts: logging.error("Failed to parse expnum from frame info in comment line") return mpc_in ccd = int(parts.group('ccd')) expnum = int(parts.group('expnum')) exp_type = parts.group('type') try: header = _connection_error_wrapper(storage._get_sghead, expnum)[ccd+1] except IOError as ioerr: logging.error(str(ioerr)) logging.error("Failed to get astrometric header for: {}".format(mpc_obs)) return mpc_in this_wcs = wcs.WCS(header) coordinate = this_wcs.xy2sky(mpc_obs.comment.x, mpc_obs.comment.y, usepv=True) mpc_obs.coordinate = coordinate[0].to('degree').value, coordinate[1].to('degree').value sep = mpc_in.coordinate.separation(mpc_obs.coordinate) if sep > TOLERANCE*20 and mpc_in.discovery and _flipped_ccd(ccd): logging.warn("Large ({}) offset using X/Y in comment line to compute RA/DEC".format(sep)) if reset_pixel_coordinates: logging.info("flipping/flopping the discvoery x/y position recorded.") x = header['NAXIS1'] - mpc_obs.comment.x + 1 y = header['NAXIS2'] - mpc_obs.comment.y + 1 new_coordinate = this_wcs.xy2sky(x, y, usepv=True) new_sep = 
mpc_in.coordinate.separation(new_coordinate) if new_sep < TOLERANCE*2: mpc_obs.coordinate = new_coordinate mpc_obs.comment.x = x mpc_obs.comment.y = y sep = new_sep if sep > TOLERANCE: # use the old header RA/DEC to predict the X/Y and then use that X/Y to get new RA/DEC logging.warn("sep: {} --> large offset when using comment line X/Y to compute RA/DEC") if reset_pixel_coordinates: logging.warn("Using RA/DEC and original WCS to compute X/Y and replacing X/Y in comment.".format(sep)) header2 = _connection_error_wrapper(storage.get_astheader, expnum, ccd) image_wcs = wcs.WCS(header2) (x, y) = image_wcs.sky2xy(mpc_in.coordinate.ra.degree, mpc_in.coordinate.dec.degree, usepv=False) mpc_obs.coordinate = this_wcs.xy2sky(x, y, usepv=True) mpc_obs.comment.x = x mpc_obs.comment.y = y logging.info("Coordinate changed: ({:5.2f},{:5.2f}) --> ({:5.2f},{:5.2f})".format(mpc_obs.comment.x, mpc_obs.comment.y, x, y)) if mpc_obs.comment.mag_uncertainty is not None: try: merr = float(mpc_obs.comment.mag_uncertainty) fwhm = float(_connection_error_wrapper(storage.get_fwhm, expnum, ccd)) centroid_err = merr * fwhm * header['PIXSCAL1'] logging.debug("Centroid uncertainty: {} {} => {}".format(merr, fwhm, centroid_err)) except Exception as err: logging.error(str(err)) logging.error("Failed to compute centroid_err for observation:\n" "{}\nUsing default of 0.2".format(mpc_obs.to_string())) centroid_err = 0.2 else: centroid_err = 0.2 mpc_obs.comment.astrometric_level = header.get('ASTLEVEL', "0") try: asterr = float(header['ASTERR']) residuals = (asterr ** 2 + centroid_err ** 2) ** 0.5 logging.debug("Residuals: {} {} => {}".format(asterr, centroid_err, residuals)) except Exception as err: logging.error(str(err)) logging.error("Failed while trying to compute plate uncertainty for\n{}".format(mpc_obs.to_string())) logging.error('Using default of 0.25') residuals = 0.25 mpc_obs.comment.plate_uncertainty = residuals logging.debug("sending back: {}".format(mpc_obs.to_string())) return mpc_obs
[ "def", "remeasure", "(", "mpc_in", ",", "reset_pixel_coordinates", "=", "True", ")", ":", "if", "mpc_in", ".", "null_observation", ":", "return", "mpc_in", "mpc_obs", "=", "deepcopy", "(", "mpc_in", ")", "logging", ".", "debug", "(", "\"rm start: {}\"", ".", ...
Compute the RA/DEC of the line based on the X/Y in the comment and the WCS of the associated image. Comment of supplied astrometric line (mpc_in) must be in OSSOSComment format. @param mpc_in: An line of astrometric measurement to recompute the RA/DEC from the X/Y in the comment. @type mpc_in: mp_ephem.Observation @param reset_pixel_coordinates: try and determine correct X/Y is X/Y doesn't map to correct RA/DEC value @type reset_pixel_coordinates: bool @type reset_pixecl_coordinates: bool
[ "Compute", "the", "RA", "/", "DEC", "of", "the", "line", "based", "on", "the", "X", "/", "Y", "in", "the", "comment", "and", "the", "WCS", "of", "the", "associated", "image", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/update_astrometry.py#L37-L139
OSSOS/MOP
src/ossos/core/ossos/pipeline/update_astrometry.py
_connection_error_wrapper
def _connection_error_wrapper(func, *args, **kwargs): """ Wrap a call to func in a try/except that repeats on ConnectionError @param func: @param args: @param kwargs: @return: """ counter = 0 while counter < 5: try: result = func(*args, **kwargs) return result except Exception as ex: time.sleep(5) counter += 1 logging.warning(str(ex))
python
def _connection_error_wrapper(func, *args, **kwargs): """ Wrap a call to func in a try/except that repeats on ConnectionError @param func: @param args: @param kwargs: @return: """ counter = 0 while counter < 5: try: result = func(*args, **kwargs) return result except Exception as ex: time.sleep(5) counter += 1 logging.warning(str(ex))
[ "def", "_connection_error_wrapper", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "counter", "=", "0", "while", "counter", "<", "5", ":", "try", ":", "result", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return...
Wrap a call to func in a try/except that repeats on ConnectionError @param func: @param args: @param kwargs: @return:
[ "Wrap", "a", "call", "to", "func", "in", "a", "try", "/", "except", "that", "repeats", "on", "ConnectionError" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/update_astrometry.py#L142-L159
OSSOS/MOP
src/ossos/core/ossos/pipeline/update_astrometry.py
recompute_mag
def recompute_mag(mpc_in, skip_centroids=False): """ Get the mag of the object given the mp_ephem.ephem.Observation """ # TODO this really shouldn't need to build a 'reading' to get the cutout... from ossos.downloads.cutouts import downloader dlm = downloader.ImageCutoutDownloader() mpc_obs = deepcopy(mpc_in) assert isinstance(mpc_obs, mp_ephem.ephem.Observation) assert isinstance(mpc_obs.comment, mp_ephem.ephem.OSSOSComment) if mpc_obs.null_observation: return mpc_obs parts = re.search('(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)', mpc_obs.comment.frame) if parts is None: return mpc_obs expnum = parts.group('expnum') ccd = parts.group('ccd') file_type = parts.group('type') observation = astrom.Observation(expnum, file_type, ccd) assert isinstance(observation, astrom.Observation) ast_header = _connection_error_wrapper(storage._get_sghead, int(expnum))[int(ccd)+1] filter_value = None for keyword in ['FILTER', 'FILT1 NAME']: filter_value = ast_header.get(keyword, None) if filter_value is not None: if filter_value.startswith('gri'): filter_value = 'w' else: filter_value = filter_value[0] break # The ZP for the current astrometric lines is the pipeline one. The new ZP is in the astheader file. new_zp = ast_header.get('PHOTZP') # The .zeropoint.used value is likely the one used for the original photometry. 
old_zp = _connection_error_wrapper(storage.get_zeropoint, int(expnum), int(ccd)) reading = astrom.SourceReading(float(mpc_obs.comment.x), float(mpc_obs.comment.y), float(mpc_obs.comment.x), float(mpc_obs.comment.y), mpc_obs.coordinate.ra.degree, mpc_obs.coordinate.dec.degree, float(mpc_obs.comment.x), float(mpc_obs.comment.y), observation, ssos=True, from_input_file=True, null_observation=False, discovery=mpc_obs.discovery) cutout = _connection_error_wrapper(dlm.download_cutout, reading, needs_apcor=True) cutout._zmag = new_zp if math.fabs(cutout.zmag - old_zp) > 0.3: logging.warn("Large change in zeropoint detected: {} -> {}".format(old_zp, cutout.zmag)) try: PHOT = cutout.get_observed_magnitude(centroid=not skip_centroids and mpc_obs.note1 != "H") x = PHOT['XCENTER'] y = PHOT['YCENTER'] mag = PHOT['MAG'] merr = PHOT['MERR'] cutout.update_pixel_location((x, y), hdu_index=cutout.extno) x, y = cutout.observed_source_point except Exception as ex: logging.error("ERROR: {}".format(str(ex))) return mpc_obs try: if mpc_obs.comment.mag_uncertainty is not None and mpc_obs.comment.mag is not None and math.fabs(mpc_obs.comment.mag - mag) > 3.5 * mpc_obs.comment.mag_uncertainty: logging.warn("recomputed magnitude shift large: {} --> {}".format(mpc_obs.mag, mag[0])) if math.sqrt((x.value - mpc_obs.comment.x) ** 2 + (y.value - mpc_obs.comment.y) ** 2) > 1.9: logging.warn("Centroid shifted ({},{}) -> ({},{})".format(mpc_obs.comment.x, mpc_obs.comment.y, x.value, y.value)) except Exception as ex: logging.error(str(ex)) # Don't use the new X/Y for Hand measured entries. 
(although call to get_observed_magnitude should have changed) if str(mpc_obs.note1) != "H" and not skip_centroids: mpc_obs.comment.x = x.value mpc_obs.comment.y = y.value try: mag = float(mag) except: return mpc_obs if math.isnan(mag): return mpc_obs if mag > 10: mpc_obs._band = filter_value mpc_obs.comment.mag = mag mpc_obs.comment.mag_uncertainty = merr # Update the mpc record magnitude if previous value existed here. if (mpc_obs.mag is not None or (mpc_obs.mag is None and mpc_in.comment.photometry_note[0] == "Z")) and mag > 10: mpc_obs.mag = mag return mpc_obs
python
def recompute_mag(mpc_in, skip_centroids=False): """ Get the mag of the object given the mp_ephem.ephem.Observation """ # TODO this really shouldn't need to build a 'reading' to get the cutout... from ossos.downloads.cutouts import downloader dlm = downloader.ImageCutoutDownloader() mpc_obs = deepcopy(mpc_in) assert isinstance(mpc_obs, mp_ephem.ephem.Observation) assert isinstance(mpc_obs.comment, mp_ephem.ephem.OSSOSComment) if mpc_obs.null_observation: return mpc_obs parts = re.search('(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)', mpc_obs.comment.frame) if parts is None: return mpc_obs expnum = parts.group('expnum') ccd = parts.group('ccd') file_type = parts.group('type') observation = astrom.Observation(expnum, file_type, ccd) assert isinstance(observation, astrom.Observation) ast_header = _connection_error_wrapper(storage._get_sghead, int(expnum))[int(ccd)+1] filter_value = None for keyword in ['FILTER', 'FILT1 NAME']: filter_value = ast_header.get(keyword, None) if filter_value is not None: if filter_value.startswith('gri'): filter_value = 'w' else: filter_value = filter_value[0] break # The ZP for the current astrometric lines is the pipeline one. The new ZP is in the astheader file. new_zp = ast_header.get('PHOTZP') # The .zeropoint.used value is likely the one used for the original photometry. 
old_zp = _connection_error_wrapper(storage.get_zeropoint, int(expnum), int(ccd)) reading = astrom.SourceReading(float(mpc_obs.comment.x), float(mpc_obs.comment.y), float(mpc_obs.comment.x), float(mpc_obs.comment.y), mpc_obs.coordinate.ra.degree, mpc_obs.coordinate.dec.degree, float(mpc_obs.comment.x), float(mpc_obs.comment.y), observation, ssos=True, from_input_file=True, null_observation=False, discovery=mpc_obs.discovery) cutout = _connection_error_wrapper(dlm.download_cutout, reading, needs_apcor=True) cutout._zmag = new_zp if math.fabs(cutout.zmag - old_zp) > 0.3: logging.warn("Large change in zeropoint detected: {} -> {}".format(old_zp, cutout.zmag)) try: PHOT = cutout.get_observed_magnitude(centroid=not skip_centroids and mpc_obs.note1 != "H") x = PHOT['XCENTER'] y = PHOT['YCENTER'] mag = PHOT['MAG'] merr = PHOT['MERR'] cutout.update_pixel_location((x, y), hdu_index=cutout.extno) x, y = cutout.observed_source_point except Exception as ex: logging.error("ERROR: {}".format(str(ex))) return mpc_obs try: if mpc_obs.comment.mag_uncertainty is not None and mpc_obs.comment.mag is not None and math.fabs(mpc_obs.comment.mag - mag) > 3.5 * mpc_obs.comment.mag_uncertainty: logging.warn("recomputed magnitude shift large: {} --> {}".format(mpc_obs.mag, mag[0])) if math.sqrt((x.value - mpc_obs.comment.x) ** 2 + (y.value - mpc_obs.comment.y) ** 2) > 1.9: logging.warn("Centroid shifted ({},{}) -> ({},{})".format(mpc_obs.comment.x, mpc_obs.comment.y, x.value, y.value)) except Exception as ex: logging.error(str(ex)) # Don't use the new X/Y for Hand measured entries. 
(although call to get_observed_magnitude should have changed) if str(mpc_obs.note1) != "H" and not skip_centroids: mpc_obs.comment.x = x.value mpc_obs.comment.y = y.value try: mag = float(mag) except: return mpc_obs if math.isnan(mag): return mpc_obs if mag > 10: mpc_obs._band = filter_value mpc_obs.comment.mag = mag mpc_obs.comment.mag_uncertainty = merr # Update the mpc record magnitude if previous value existed here. if (mpc_obs.mag is not None or (mpc_obs.mag is None and mpc_in.comment.photometry_note[0] == "Z")) and mag > 10: mpc_obs.mag = mag return mpc_obs
[ "def", "recompute_mag", "(", "mpc_in", ",", "skip_centroids", "=", "False", ")", ":", "# TODO this really shouldn't need to build a 'reading' to get the cutout...", "from", "ossos", ".", "downloads", ".", "cutouts", "import", "downloader", "dlm", "=", "downloader", ".", ...
Get the mag of the object given the mp_ephem.ephem.Observation
[ "Get", "the", "mag", "of", "the", "object", "given", "the", "mp_ephem", ".", "ephem", ".", "Observation" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/update_astrometry.py#L162-L262
OSSOS/MOP
src/ossos/core/ossos/pipeline/update_astrometry.py
compare_orbits
def compare_orbits(original_obs, modified_obs, cor_file): """Compare the orbit fit given the oringal and modified astrometry.""" origin = orbfit.Orbfit(original_obs) modified = orbfit.Orbfit(modified_obs) orbpt = file(cor_file+".orb", 'w') # Dump summaries of the orbits orbpt.write("#"*80+"\n") orbpt.write("# ORIGINAL ORBIT\n") orbpt.write(origin.summarize()+"\n") orbpt.write("#"*80+"\n") orbpt.write("# MODIFIED ORBIT\n") orbpt.write(modified.summarize()+"\n") orbpt.write("#"*80+"\n") # Create a report of the change in orbit parameter uncertainty for element in ['a', 'e', 'inc', 'om', 'Node', 'T']: oval = getattr(origin, element).value doval = getattr(origin, "d"+element).value mval = getattr(modified, element).value dmval = getattr(modified, "d"+element).value precision = max(int(-1*math.floor(math.log10(dmval))), int(-1*math.floor(math.log10(doval)))) + 1 precision = max(0, precision) vdigits = 12 ddigits = 6 vpadding = " "*int(vdigits-precision) dpadding = " "*int(ddigits-precision) orbpt.write("{element:>5s}: " "{oval[0]:>{vdigits}.{vdigits}}.{oval[1]:<{precision}.{precision}} {vpadding} +/- " "{doval[0]:>{ddigits}.{ddigits}}.{doval[1]:<{precision}.{precision}} {dpadding} ==> " "{mval[0]:>{vdigits}.{vdigits}}.{mval[1]:<{precision}.{precision}} {vpadding} +/- " "{dmval[0]:>{ddigits}.{ddigits}}.{dmval[1]:<{precision}.{precision}}\n".format( element=element, dpadding=dpadding, vpadding=vpadding, vdigits=vdigits, ddigits=ddigits, oval="{:12.12f}".format(oval).split("."), doval="{:12.12f}".format(doval).split("."), mval="{:12.12f}".format(mval).split("."), dmval="{:12.12f}".format(dmval).split("."), precision=precision) ) delta = math.fabs(oval - mval) if delta > 3.5 * doval: logging.warn("large delta for element {}: {} --> {}".format(element, oval, mval)) # Compute the stdev of the residuals and report the change given the new observations orbpt.write("*"*80+"\n") orbpt.write("Change in orbital parameters \n") sep = "Change in scatter between initial and 
recalibrated obseravtions. \n" for orb in [origin, modified]: orbpt.write(sep) sep = "\n ==> becomes ==> \n" residuals = orb.residuals dra = [] ddec = [] mags = {} for observation in orb.observations: if not observation.null_observation: dra.append(observation.ra_residual) ddec.append(observation.dec_residual) filter = observation.band if filter is not None: if filter not in mags: mags[filter] = [] try: mags[filter].append(float(observation.mag)) except: pass if observation.comment.plate_uncertainty * 5.0 < \ ((observation.ra_residual ** 2 + observation.dec_residual ** 2) ** 0.5): logging.warn("LARGE RESIDUAL ON: {}".format(observation.to_string())) logging.warn("Fit residual unreasonably large.") dra = numpy.array(dra) ddec = numpy.array(ddec) merr_str = "" for filter in mags: mag = numpy.percentile(numpy.array(mags[filter]), (50)) mags[filter] = numpy.percentile(numpy.array(mags[filter]), (5,95)) merr = (mags[filter][1] - mags[filter][0])/6.0 merr_str += " {}: {:8.2f} +/- {:8.2f}".format(filter, mag, merr) orbpt.write("ra_std:{:8.4} dec_std:{:8.4} mag: {}".format(dra.std(), ddec.std(), merr_str)) orbpt.write("\n") orbpt.close()
python
def compare_orbits(original_obs, modified_obs, cor_file): """Compare the orbit fit given the oringal and modified astrometry.""" origin = orbfit.Orbfit(original_obs) modified = orbfit.Orbfit(modified_obs) orbpt = file(cor_file+".orb", 'w') # Dump summaries of the orbits orbpt.write("#"*80+"\n") orbpt.write("# ORIGINAL ORBIT\n") orbpt.write(origin.summarize()+"\n") orbpt.write("#"*80+"\n") orbpt.write("# MODIFIED ORBIT\n") orbpt.write(modified.summarize()+"\n") orbpt.write("#"*80+"\n") # Create a report of the change in orbit parameter uncertainty for element in ['a', 'e', 'inc', 'om', 'Node', 'T']: oval = getattr(origin, element).value doval = getattr(origin, "d"+element).value mval = getattr(modified, element).value dmval = getattr(modified, "d"+element).value precision = max(int(-1*math.floor(math.log10(dmval))), int(-1*math.floor(math.log10(doval)))) + 1 precision = max(0, precision) vdigits = 12 ddigits = 6 vpadding = " "*int(vdigits-precision) dpadding = " "*int(ddigits-precision) orbpt.write("{element:>5s}: " "{oval[0]:>{vdigits}.{vdigits}}.{oval[1]:<{precision}.{precision}} {vpadding} +/- " "{doval[0]:>{ddigits}.{ddigits}}.{doval[1]:<{precision}.{precision}} {dpadding} ==> " "{mval[0]:>{vdigits}.{vdigits}}.{mval[1]:<{precision}.{precision}} {vpadding} +/- " "{dmval[0]:>{ddigits}.{ddigits}}.{dmval[1]:<{precision}.{precision}}\n".format( element=element, dpadding=dpadding, vpadding=vpadding, vdigits=vdigits, ddigits=ddigits, oval="{:12.12f}".format(oval).split("."), doval="{:12.12f}".format(doval).split("."), mval="{:12.12f}".format(mval).split("."), dmval="{:12.12f}".format(dmval).split("."), precision=precision) ) delta = math.fabs(oval - mval) if delta > 3.5 * doval: logging.warn("large delta for element {}: {} --> {}".format(element, oval, mval)) # Compute the stdev of the residuals and report the change given the new observations orbpt.write("*"*80+"\n") orbpt.write("Change in orbital parameters \n") sep = "Change in scatter between initial and 
recalibrated obseravtions. \n" for orb in [origin, modified]: orbpt.write(sep) sep = "\n ==> becomes ==> \n" residuals = orb.residuals dra = [] ddec = [] mags = {} for observation in orb.observations: if not observation.null_observation: dra.append(observation.ra_residual) ddec.append(observation.dec_residual) filter = observation.band if filter is not None: if filter not in mags: mags[filter] = [] try: mags[filter].append(float(observation.mag)) except: pass if observation.comment.plate_uncertainty * 5.0 < \ ((observation.ra_residual ** 2 + observation.dec_residual ** 2) ** 0.5): logging.warn("LARGE RESIDUAL ON: {}".format(observation.to_string())) logging.warn("Fit residual unreasonably large.") dra = numpy.array(dra) ddec = numpy.array(ddec) merr_str = "" for filter in mags: mag = numpy.percentile(numpy.array(mags[filter]), (50)) mags[filter] = numpy.percentile(numpy.array(mags[filter]), (5,95)) merr = (mags[filter][1] - mags[filter][0])/6.0 merr_str += " {}: {:8.2f} +/- {:8.2f}".format(filter, mag, merr) orbpt.write("ra_std:{:8.4} dec_std:{:8.4} mag: {}".format(dra.std(), ddec.std(), merr_str)) orbpt.write("\n") orbpt.close()
[ "def", "compare_orbits", "(", "original_obs", ",", "modified_obs", ",", "cor_file", ")", ":", "origin", "=", "orbfit", ".", "Orbfit", "(", "original_obs", ")", "modified", "=", "orbfit", ".", "Orbfit", "(", "modified_obs", ")", "orbpt", "=", "file", "(", "...
Compare the orbit fit given the oringal and modified astrometry.
[ "Compare", "the", "orbit", "fit", "given", "the", "oringal", "and", "modified", "astrometry", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/update_astrometry.py#L343-L432
JohnVinyard/zounds
zounds/timeseries/audiosamples.py
AudioSamples.mono
def mono(self): """ Return this instance summed to mono. If the instance is already mono, this is a no-op. """ if self.channels == 1: return self x = self.sum(axis=1) * 0.5 y = x * 0.5 return AudioSamples(y, self.samplerate)
python
def mono(self): """ Return this instance summed to mono. If the instance is already mono, this is a no-op. """ if self.channels == 1: return self x = self.sum(axis=1) * 0.5 y = x * 0.5 return AudioSamples(y, self.samplerate)
[ "def", "mono", "(", "self", ")", ":", "if", "self", ".", "channels", "==", "1", ":", "return", "self", "x", "=", "self", ".", "sum", "(", "axis", "=", "1", ")", "*", "0.5", "y", "=", "x", "*", "0.5", "return", "AudioSamples", "(", "y", ",", "...
Return this instance summed to mono. If the instance is already mono, this is a no-op.
[ "Return", "this", "instance", "summed", "to", "mono", ".", "If", "the", "instance", "is", "already", "mono", "this", "is", "a", "no", "-", "op", "." ]
train
https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/timeseries/audiosamples.py#L149-L158
JohnVinyard/zounds
zounds/timeseries/audiosamples.py
AudioSamples.encode
def encode(self, flo=None, fmt='WAV', subtype='PCM_16'): """ Return audio samples encoded as bytes given a particular audio format Args: flo (file-like): A file-like object to write the bytes to. If flo is not supplied, a new :class:`io.BytesIO` instance will be created and returned fmt (str): A libsndfile-friendly identifier for an audio encoding (detailed here: http://www.mega-nerd.com/libsndfile/api.html) subtype (str): A libsndfile-friendly identifier for an audio encoding subtype (detailed here: http://www.mega-nerd.com/libsndfile/api.html) Examples: >>> from zounds import SR11025, AudioSamples >>> import numpy as np >>> silence = np.zeros(11025*10) >>> samples = AudioSamples(silence, SR11025()) >>> bio = samples.encode() >>> bio.read(10) 'RIFFx]\\x03\\x00WA' """ flo = flo or BytesIO() with SoundFile( flo, mode='w', channels=self.channels, format=fmt, subtype=subtype, samplerate=self.samples_per_second) as f: if fmt == 'OGG': # KLUDGE: Trying to write too-large chunks to an ogg file seems # to cause a segfault in libsndfile # KLUDGE: This logic is very similar to logic in the OggVorbis # processing node, and should probably be factored into a common # location factor = 20 chunksize = self.samples_per_second * factor for i in range(0, len(self), chunksize): chunk = self[i: i + chunksize] f.write(chunk) else: # write everything in one chunk f.write(self) flo.seek(0) return flo
python
def encode(self, flo=None, fmt='WAV', subtype='PCM_16'): """ Return audio samples encoded as bytes given a particular audio format Args: flo (file-like): A file-like object to write the bytes to. If flo is not supplied, a new :class:`io.BytesIO` instance will be created and returned fmt (str): A libsndfile-friendly identifier for an audio encoding (detailed here: http://www.mega-nerd.com/libsndfile/api.html) subtype (str): A libsndfile-friendly identifier for an audio encoding subtype (detailed here: http://www.mega-nerd.com/libsndfile/api.html) Examples: >>> from zounds import SR11025, AudioSamples >>> import numpy as np >>> silence = np.zeros(11025*10) >>> samples = AudioSamples(silence, SR11025()) >>> bio = samples.encode() >>> bio.read(10) 'RIFFx]\\x03\\x00WA' """ flo = flo or BytesIO() with SoundFile( flo, mode='w', channels=self.channels, format=fmt, subtype=subtype, samplerate=self.samples_per_second) as f: if fmt == 'OGG': # KLUDGE: Trying to write too-large chunks to an ogg file seems # to cause a segfault in libsndfile # KLUDGE: This logic is very similar to logic in the OggVorbis # processing node, and should probably be factored into a common # location factor = 20 chunksize = self.samples_per_second * factor for i in range(0, len(self), chunksize): chunk = self[i: i + chunksize] f.write(chunk) else: # write everything in one chunk f.write(self) flo.seek(0) return flo
[ "def", "encode", "(", "self", ",", "flo", "=", "None", ",", "fmt", "=", "'WAV'", ",", "subtype", "=", "'PCM_16'", ")", ":", "flo", "=", "flo", "or", "BytesIO", "(", ")", "with", "SoundFile", "(", "flo", ",", "mode", "=", "'w'", ",", "channels", "...
Return audio samples encoded as bytes given a particular audio format Args: flo (file-like): A file-like object to write the bytes to. If flo is not supplied, a new :class:`io.BytesIO` instance will be created and returned fmt (str): A libsndfile-friendly identifier for an audio encoding (detailed here: http://www.mega-nerd.com/libsndfile/api.html) subtype (str): A libsndfile-friendly identifier for an audio encoding subtype (detailed here: http://www.mega-nerd.com/libsndfile/api.html) Examples: >>> from zounds import SR11025, AudioSamples >>> import numpy as np >>> silence = np.zeros(11025*10) >>> samples = AudioSamples(silence, SR11025()) >>> bio = samples.encode() >>> bio.read(10) 'RIFFx]\\x03\\x00WA'
[ "Return", "audio", "samples", "encoded", "as", "bytes", "given", "a", "particular", "audio", "format" ]
train
https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/timeseries/audiosamples.py#L181-L229
playpauseandstop/rororo
rororo/schemas/validators.py
extend_with_default
def extend_with_default(validator_class: Any) -> Any: """Append defaults from schema to instance need to be validated. :param validator_class: Apply the change for given validator class. """ validate_properties = validator_class.VALIDATORS['properties'] def set_defaults(validator: Any, properties: dict, instance: dict, schema: dict) -> Iterator[ValidationError]: for prop, subschema in properties.items(): if 'default' in subschema: instance.setdefault(prop, subschema['default']) for error in validate_properties( validator, properties, instance, schema, ): yield error # pragma: no cover return extend(validator_class, {'properties': set_defaults})
python
def extend_with_default(validator_class: Any) -> Any: """Append defaults from schema to instance need to be validated. :param validator_class: Apply the change for given validator class. """ validate_properties = validator_class.VALIDATORS['properties'] def set_defaults(validator: Any, properties: dict, instance: dict, schema: dict) -> Iterator[ValidationError]: for prop, subschema in properties.items(): if 'default' in subschema: instance.setdefault(prop, subschema['default']) for error in validate_properties( validator, properties, instance, schema, ): yield error # pragma: no cover return extend(validator_class, {'properties': set_defaults})
[ "def", "extend_with_default", "(", "validator_class", ":", "Any", ")", "->", "Any", ":", "validate_properties", "=", "validator_class", ".", "VALIDATORS", "[", "'properties'", "]", "def", "set_defaults", "(", "validator", ":", "Any", ",", "properties", ":", "dic...
Append defaults from schema to instance need to be validated. :param validator_class: Apply the change for given validator class.
[ "Append", "defaults", "from", "schema", "to", "instance", "need", "to", "be", "validated", "." ]
train
https://github.com/playpauseandstop/rororo/blob/28a04e8028c29647941e727116335e9d6fd64c27/rororo/schemas/validators.py#L33-L53
JohnVinyard/zounds
zounds/spectral/weighting.py
FrequencyWeighting.weights
def weights(self, other): """ Compute weights, given a scale or time-frequency representation :param other: A time-frequency representation, or a scale :return: a numpy array of weights """ try: return self._wdata(other) except AttributeError: frequency_dim = other.dimensions[-1] return self._wdata(frequency_dim.scale)
python
def weights(self, other): """ Compute weights, given a scale or time-frequency representation :param other: A time-frequency representation, or a scale :return: a numpy array of weights """ try: return self._wdata(other) except AttributeError: frequency_dim = other.dimensions[-1] return self._wdata(frequency_dim.scale)
[ "def", "weights", "(", "self", ",", "other", ")", ":", "try", ":", "return", "self", ".", "_wdata", "(", "other", ")", "except", "AttributeError", ":", "frequency_dim", "=", "other", ".", "dimensions", "[", "-", "1", "]", "return", "self", ".", "_wdata...
Compute weights, given a scale or time-frequency representation :param other: A time-frequency representation, or a scale :return: a numpy array of weights
[ "Compute", "weights", "given", "a", "scale", "or", "time", "-", "frequency", "representation", ":", "param", "other", ":", "A", "time", "-", "frequency", "representation", "or", "a", "scale", ":", "return", ":", "a", "numpy", "array", "of", "weights" ]
train
https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/weighting.py#L15-L25
OSSOS/MOP
src/ossos/core/scripts/rate_angle_check.py
rates_angles
def rates_angles(fk_candidate_observations): """ :param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted """ detections = fk_candidate_observations.get_sources() for detection in detections: measures = detection.get_readings() for measure in measures: def main(): parser = argparse.ArgumentParser() parser.add_argument('--astrom-filename', default=None, help="Give the astrom file directly instead of looking-up " "using the field/ccd naming scheme.") parser.add_argument('--reals', action='store_true', default=False) parser.add_argument('--type', choices=['o', 'p', 's'], help="Which type of image.", default='s') parser.add_argument('--measure3', default='vos:OSSOS/measure3/2013B-L_redo/') parser.add_argument('--dbimages', default=None) parser.add_argument('--dry-run', action='store_true', default=False) parser.add_argument('--force', action='store_true', default=False) args = parser.parse_args() logging.basicConfig(level=logging.INFO) prefix = 'fk' ext = args.reals and 'reals' or 'cands' storage.MEASURE3 = args.measure3 if args.dbimages is not None: storage.DBIMAGES = args.dbimages astrom.DATASET_ROOT = args.dbimages astrom_uri = storage.get_cands_uri(args.field, ccd=args.ccd, version=args.type, prefix=prefix, ext="measure3.{}.astrom".format(ext)) if args.astrom_filename is None: astrom_filename = os.path.basename(astrom_uri) else: astrom_filename = args.astrom_filename if not os.access(astrom_filename, os.F_OK): astrom_filename = os.path.dirname(astrom_uri) + "/" + astrom_filename # Load the list of astrometric observations that will be looked at. fk_candidate_observations = astrom.parse(astrom_filename)
python
def rates_angles(fk_candidate_observations): """ :param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted """ detections = fk_candidate_observations.get_sources() for detection in detections: measures = detection.get_readings() for measure in measures: def main(): parser = argparse.ArgumentParser() parser.add_argument('--astrom-filename', default=None, help="Give the astrom file directly instead of looking-up " "using the field/ccd naming scheme.") parser.add_argument('--reals', action='store_true', default=False) parser.add_argument('--type', choices=['o', 'p', 's'], help="Which type of image.", default='s') parser.add_argument('--measure3', default='vos:OSSOS/measure3/2013B-L_redo/') parser.add_argument('--dbimages', default=None) parser.add_argument('--dry-run', action='store_true', default=False) parser.add_argument('--force', action='store_true', default=False) args = parser.parse_args() logging.basicConfig(level=logging.INFO) prefix = 'fk' ext = args.reals and 'reals' or 'cands' storage.MEASURE3 = args.measure3 if args.dbimages is not None: storage.DBIMAGES = args.dbimages astrom.DATASET_ROOT = args.dbimages astrom_uri = storage.get_cands_uri(args.field, ccd=args.ccd, version=args.type, prefix=prefix, ext="measure3.{}.astrom".format(ext)) if args.astrom_filename is None: astrom_filename = os.path.basename(astrom_uri) else: astrom_filename = args.astrom_filename if not os.access(astrom_filename, os.F_OK): astrom_filename = os.path.dirname(astrom_uri) + "/" + astrom_filename # Load the list of astrometric observations that will be looked at. fk_candidate_observations = astrom.parse(astrom_filename)
[ "def", "rates_angles", "(", "fk_candidate_observations", ")", ":", "detections", "=", "fk_candidate_observations", ".", "get_sources", "(", ")", "for", "detection", "in", "detections", ":", "measures", "=", "detection", ".", "get_readings", "(", ")", "for", "measu...
:param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted
[ ":", "param", "fk_candidate_observations", ":", "name", "of", "the", "fk", "*", "reals", ".", "astrom", "file", "to", "check", "against", "Object", ".", "planted" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/rate_angle_check.py#L47-L97
OSSOS/MOP
src/ossos/core/ossos/downloads/core.py
Downloader.download_hdulist
def download_hdulist(self, uri, **kwargs): """ Downloads a FITS image as a HDUList. Args: uri: The URI of the FITS image to download. kwargs: optional arguments to pass to the vos client. For example, passing view="cutout" and cutout=[1] will result in a cutout of extension 1 from the FITS image specified by the URI. Returns: hdulist: astropy.io.fits.hdu.hdulist.HDUList The requests FITS image as an Astropy HDUList object (http://docs.astropy.org/en/latest/io/fits/api/hdulists.html). """ logger.debug(str(kwargs)) hdulist = None try: vobj = storage.vofile(uri, **kwargs) try: fobj = cStringIO.StringIO(vobj.read()) fobj.seek(0) hdulist = fits.open(fobj) except Exception as e: sys.stderr.write("ERROR: {}\n".format(str(e))) sys.stderr.write("While loading {} {}\n".format(uri, kwargs)) pass finally: vobj.close() except Exception as e: sys.stderr.write(str(e)+"\n") sys.stderr.write("While opening connection to {}.\n".format(uri)) sys.stderr.write("Sending back FLAT instead, too keep display happy.") hdulist = self.download_hdulist('vos:OSSOS/dbimages/calibrators/13AQ05_r_flat.fits', **kwargs) return hdulist
python
def download_hdulist(self, uri, **kwargs): """ Downloads a FITS image as a HDUList. Args: uri: The URI of the FITS image to download. kwargs: optional arguments to pass to the vos client. For example, passing view="cutout" and cutout=[1] will result in a cutout of extension 1 from the FITS image specified by the URI. Returns: hdulist: astropy.io.fits.hdu.hdulist.HDUList The requests FITS image as an Astropy HDUList object (http://docs.astropy.org/en/latest/io/fits/api/hdulists.html). """ logger.debug(str(kwargs)) hdulist = None try: vobj = storage.vofile(uri, **kwargs) try: fobj = cStringIO.StringIO(vobj.read()) fobj.seek(0) hdulist = fits.open(fobj) except Exception as e: sys.stderr.write("ERROR: {}\n".format(str(e))) sys.stderr.write("While loading {} {}\n".format(uri, kwargs)) pass finally: vobj.close() except Exception as e: sys.stderr.write(str(e)+"\n") sys.stderr.write("While opening connection to {}.\n".format(uri)) sys.stderr.write("Sending back FLAT instead, too keep display happy.") hdulist = self.download_hdulist('vos:OSSOS/dbimages/calibrators/13AQ05_r_flat.fits', **kwargs) return hdulist
[ "def", "download_hdulist", "(", "self", ",", "uri", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "debug", "(", "str", "(", "kwargs", ")", ")", "hdulist", "=", "None", "try", ":", "vobj", "=", "storage", ".", "vofile", "(", "uri", ",", "*", "...
Downloads a FITS image as a HDUList. Args: uri: The URI of the FITS image to download. kwargs: optional arguments to pass to the vos client. For example, passing view="cutout" and cutout=[1] will result in a cutout of extension 1 from the FITS image specified by the URI. Returns: hdulist: astropy.io.fits.hdu.hdulist.HDUList The requests FITS image as an Astropy HDUList object (http://docs.astropy.org/en/latest/io/fits/api/hdulists.html).
[ "Downloads", "a", "FITS", "image", "as", "a", "HDUList", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/downloads/core.py#L17-L53
OSSOS/MOP
src/ossos/core/ossos/downloads/core.py
Downloader.download_apcor
def download_apcor(self, uri): """ Downloads apcor data. Args: uri: The URI of the apcor data file. Returns: apcor: ossos.downloads.core.ApcorData """ local_file = os.path.basename(uri) if os.access(local_file, os.F_OK): fobj = open(local_file) else: fobj = storage.vofile(uri, view='data') fobj.seek(0) str = fobj.read() fobj.close() apcor_str = str return ApcorData.from_string(apcor_str)
python
def download_apcor(self, uri): """ Downloads apcor data. Args: uri: The URI of the apcor data file. Returns: apcor: ossos.downloads.core.ApcorData """ local_file = os.path.basename(uri) if os.access(local_file, os.F_OK): fobj = open(local_file) else: fobj = storage.vofile(uri, view='data') fobj.seek(0) str = fobj.read() fobj.close() apcor_str = str return ApcorData.from_string(apcor_str)
[ "def", "download_apcor", "(", "self", ",", "uri", ")", ":", "local_file", "=", "os", ".", "path", ".", "basename", "(", "uri", ")", "if", "os", ".", "access", "(", "local_file", ",", "os", ".", "F_OK", ")", ":", "fobj", "=", "open", "(", "local_fil...
Downloads apcor data. Args: uri: The URI of the apcor data file. Returns: apcor: ossos.downloads.core.ApcorData
[ "Downloads", "apcor", "data", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/downloads/core.py#L55-L75
OSSOS/MOP
src/ossos/core/ossos/downloads/core.py
ApcorData.from_string
def from_string(cls, rawstr): """ Creates an ApcorData record from the raw string format. Expected string format: ap_in ap_out ap_cor apcor_err """ try: args = map(float, rawstr.split()) except Exception as ex: import sys logger.error("Failed to convert aperture correction: {}".format(ex)) raise ex return cls(*args)
python
def from_string(cls, rawstr): """ Creates an ApcorData record from the raw string format. Expected string format: ap_in ap_out ap_cor apcor_err """ try: args = map(float, rawstr.split()) except Exception as ex: import sys logger.error("Failed to convert aperture correction: {}".format(ex)) raise ex return cls(*args)
[ "def", "from_string", "(", "cls", ",", "rawstr", ")", ":", "try", ":", "args", "=", "map", "(", "float", ",", "rawstr", ".", "split", "(", ")", ")", "except", "Exception", "as", "ex", ":", "import", "sys", "logger", ".", "error", "(", "\"Failed to co...
Creates an ApcorData record from the raw string format. Expected string format: ap_in ap_out ap_cor apcor_err
[ "Creates", "an", "ApcorData", "record", "from", "the", "raw", "string", "format", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/downloads/core.py#L95-L108
OSSOS/MOP
src/ossos/core/ossos/pipeline/mkpsf.py
run
def run(expnum, ccd, version, dry_run=False, prefix="", force=False): """Run the OSSOS jmpmakepsf script. """ message = storage.SUCCESS if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force: logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd)) return with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run): try: if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd): raise IOError("{} not yet run for {}".format(dependency, expnum)) # confirm destination directory exists. destdir = os.path.dirname( storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits')) if not dry_run: storage.mkdir(destdir) # get image from the vospace storage area logging.info("Getting fits image from VOSpace") filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) # get mopheader from the vospace storage area logging.info("Getting mopheader from VOSpace") mopheader_filename = storage.get_file(expnum, ccd, version=version, prefix=prefix, ext='mopheader') # run mkpsf process logging.info("Running mkpsf on %s %d" % (expnum, ccd)) logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename, 'yes', 'yes'])) if dry_run: return # place the results into VOSpace basename = os.path.splitext(filename)[0] for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor', 'fwhm', 'phot'): dest = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext=ext) source = basename + "." + str(ext) count = 0 with open(source, 'r'): while True: count += 1 try: logging.info("Attempt {} to copy {} -> {}".format(count, source, dest)) storage.copy(source, dest) break except Exception as ex: if count > 10: raise ex # set some data parameters associated with the image, determined in this step. 
storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd, status=str(storage.get_fwhm( expnum, ccd=ccd, prefix=prefix, version=version))) storage.set_status('zeropoint', prefix, expnum, version=version, ccd=ccd, status=str(storage.get_zeropoint( expnum, ccd=ccd, prefix=prefix, version=version))) logging.info(message) except Exception as e: message = str(e) logging.error(message) storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message) return
python
def run(expnum, ccd, version, dry_run=False, prefix="", force=False): """Run the OSSOS jmpmakepsf script. """ message = storage.SUCCESS if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force: logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd)) return with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run): try: if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd): raise IOError("{} not yet run for {}".format(dependency, expnum)) # confirm destination directory exists. destdir = os.path.dirname( storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits')) if not dry_run: storage.mkdir(destdir) # get image from the vospace storage area logging.info("Getting fits image from VOSpace") filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) # get mopheader from the vospace storage area logging.info("Getting mopheader from VOSpace") mopheader_filename = storage.get_file(expnum, ccd, version=version, prefix=prefix, ext='mopheader') # run mkpsf process logging.info("Running mkpsf on %s %d" % (expnum, ccd)) logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename, 'yes', 'yes'])) if dry_run: return # place the results into VOSpace basename = os.path.splitext(filename)[0] for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor', 'fwhm', 'phot'): dest = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext=ext) source = basename + "." + str(ext) count = 0 with open(source, 'r'): while True: count += 1 try: logging.info("Attempt {} to copy {} -> {}".format(count, source, dest)) storage.copy(source, dest) break except Exception as ex: if count > 10: raise ex # set some data parameters associated with the image, determined in this step. 
storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd, status=str(storage.get_fwhm( expnum, ccd=ccd, prefix=prefix, version=version))) storage.set_status('zeropoint', prefix, expnum, version=version, ccd=ccd, status=str(storage.get_zeropoint( expnum, ccd=ccd, prefix=prefix, version=version))) logging.info(message) except Exception as e: message = str(e) logging.error(message) storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message) return
[ "def", "run", "(", "expnum", ",", "ccd", ",", "version", ",", "dry_run", "=", "False", ",", "prefix", "=", "\"\"", ",", "force", "=", "False", ")", ":", "message", "=", "storage", ".", "SUCCESS", "if", "storage", ".", "get_status", "(", "task", ",", ...
Run the OSSOS jmpmakepsf script.
[ "Run", "the", "OSSOS", "jmpmakepsf", "script", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/mkpsf.py#L37-L109
OSSOS/MOP
src/ossos/core/ossos/pipeline/mk_mopheader.py
run
def run(expnum, ccd, version, dry_run=False, prefix="", force=False, ignore_dependency=False): """Run the OSSOS mopheader script. """ message = storage.SUCCESS logging.info("Attempting to get status on header for {} {}".format(expnum, ccd)) if storage.get_status(task, prefix, expnum, version, ccd) and not force: logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd)) return message with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run): try: logging.info("Building a mopheader ") if not storage.get_status(dependency, prefix, expnum, "p", 36) and not ignore_dependency: raise IOError("{} not yet run for {}".format(dependency, expnum)) # confirm destination directory exists. destdir = os.path.dirname(storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits')) if not dry_run: storage.mkdir(destdir) # get image from the vospace storage area logging.info("Retrieving image from VOSpace") filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) # launch the stepZjmp program logging.info("Launching stepZ on %s %d" % (expnum, ccd)) expname = os.path.basename(filename).strip('.fits') logging.info(util.exec_prog(['stepZjmp', '-f', expname])) # if this is a dry run then we are finished if dry_run: return message # push the header to the VOSpace mopheader_filename = expname+".mopheader" destination = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='mopheader') source = mopheader_filename count = 0 with open(source, 'r'): while True: try: count += 1 logging.info("Attempt {} to copy {} -> {}".format(count, source, destination)) storage.copy(source, destination) break except Exception as ex: if count > 10: raise ex logging.info(message) except CalledProcessError as cpe: message = str(cpe.output) logging.error(message) except Exception as e: message = str(e) logging.error(message) if not dry_run: storage.set_status(task, prefix, expnum, version=version, ccd=ccd, 
status=message) return message
python
def run(expnum, ccd, version, dry_run=False, prefix="", force=False, ignore_dependency=False): """Run the OSSOS mopheader script. """ message = storage.SUCCESS logging.info("Attempting to get status on header for {} {}".format(expnum, ccd)) if storage.get_status(task, prefix, expnum, version, ccd) and not force: logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd)) return message with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run): try: logging.info("Building a mopheader ") if not storage.get_status(dependency, prefix, expnum, "p", 36) and not ignore_dependency: raise IOError("{} not yet run for {}".format(dependency, expnum)) # confirm destination directory exists. destdir = os.path.dirname(storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits')) if not dry_run: storage.mkdir(destdir) # get image from the vospace storage area logging.info("Retrieving image from VOSpace") filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) # launch the stepZjmp program logging.info("Launching stepZ on %s %d" % (expnum, ccd)) expname = os.path.basename(filename).strip('.fits') logging.info(util.exec_prog(['stepZjmp', '-f', expname])) # if this is a dry run then we are finished if dry_run: return message # push the header to the VOSpace mopheader_filename = expname+".mopheader" destination = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='mopheader') source = mopheader_filename count = 0 with open(source, 'r'): while True: try: count += 1 logging.info("Attempt {} to copy {} -> {}".format(count, source, destination)) storage.copy(source, destination) break except Exception as ex: if count > 10: raise ex logging.info(message) except CalledProcessError as cpe: message = str(cpe.output) logging.error(message) except Exception as e: message = str(e) logging.error(message) if not dry_run: storage.set_status(task, prefix, expnum, version=version, ccd=ccd, 
status=message) return message
[ "def", "run", "(", "expnum", ",", "ccd", ",", "version", ",", "dry_run", "=", "False", ",", "prefix", "=", "\"\"", ",", "force", "=", "False", ",", "ignore_dependency", "=", "False", ")", ":", "message", "=", "storage", ".", "SUCCESS", "logging", ".", ...
Run the OSSOS mopheader script.
[ "Run", "the", "OSSOS", "mopheader", "script", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/pipeline/mk_mopheader.py#L39-L100
OSSOS/MOP
src/jjk/preproc/ephemSearch.py
htmIndex
def htmIndex(ra,dec,htm_level=3): """Compute htm index of htm_level at position ra,dec""" import re if os.uname()[0] == "Linux": javabin = '/opt/java2/bin/java ' htm_level = htm_level verc_htm_cmd = javabin+'-classpath /usr/cadc/misc/htm/htmIndex.jar edu.jhu.htm.app.lookup %s %s %s' % (htm_level, ra, dec) for result in os.popen( verc_htm_cmd ).readlines(): result = result[:-1] if re.search("ID/Name cc", result): (void, coord ) = result.split("=") (void, junk, htm_index) = coord.split(" ") return htm_index
python
def htmIndex(ra,dec,htm_level=3): """Compute htm index of htm_level at position ra,dec""" import re if os.uname()[0] == "Linux": javabin = '/opt/java2/bin/java ' htm_level = htm_level verc_htm_cmd = javabin+'-classpath /usr/cadc/misc/htm/htmIndex.jar edu.jhu.htm.app.lookup %s %s %s' % (htm_level, ra, dec) for result in os.popen( verc_htm_cmd ).readlines(): result = result[:-1] if re.search("ID/Name cc", result): (void, coord ) = result.split("=") (void, junk, htm_index) = coord.split(" ") return htm_index
[ "def", "htmIndex", "(", "ra", ",", "dec", ",", "htm_level", "=", "3", ")", ":", "import", "re", "if", "os", ".", "uname", "(", ")", "[", "0", "]", "==", "\"Linux\"", ":", "javabin", "=", "'/opt/java2/bin/java '", "htm_level", "=", "htm_level", "verc_ht...
Compute htm index of htm_level at position ra,dec
[ "Compute", "htm", "index", "of", "htm_level", "at", "position", "ra", "dec" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/ephemSearch.py#L70-L82
OSSOS/MOP
src/jjk/preproc/ephemSearch.py
circTOcutout
def circTOcutout(wcs,ra,dec,rad): """Convert an RA/DEC/RADIUS to an imcopy cutout""" (x1,y1)=wcs.rd2xy((ra+rad/2.0,dec-rad/2.0)) (x2,y2)=wcs.rd2xy((ra-rad/2.0,dec+rad/2.0)) xl=min(x1,x2) xr=max(x1,x2) yl=min(y1,y2) yu=max(y1,y2) ### constrain the cutout to be inside the image x1=max(xl,1) x1=int(min(x1,wcs.naxis1)) x2=max(xr,x1,1) x2=int(min(x2,wcs.naxis1)) y1=max(yl,1) y1=int(min(y1,wcs.naxis2)) y2=max(yu,y1,1) y2=int(min(y2,wcs.naxis2)) area=(x2-x1)*(y2-y1) cutout="[%d:%d,%d:%d]" % ( x1,x2,y1,y2) if not y1<y2 or not x1<x2: cutout=None return (cutout, area)
python
def circTOcutout(wcs,ra,dec,rad): """Convert an RA/DEC/RADIUS to an imcopy cutout""" (x1,y1)=wcs.rd2xy((ra+rad/2.0,dec-rad/2.0)) (x2,y2)=wcs.rd2xy((ra-rad/2.0,dec+rad/2.0)) xl=min(x1,x2) xr=max(x1,x2) yl=min(y1,y2) yu=max(y1,y2) ### constrain the cutout to be inside the image x1=max(xl,1) x1=int(min(x1,wcs.naxis1)) x2=max(xr,x1,1) x2=int(min(x2,wcs.naxis1)) y1=max(yl,1) y1=int(min(y1,wcs.naxis2)) y2=max(yu,y1,1) y2=int(min(y2,wcs.naxis2)) area=(x2-x1)*(y2-y1) cutout="[%d:%d,%d:%d]" % ( x1,x2,y1,y2) if not y1<y2 or not x1<x2: cutout=None return (cutout, area)
[ "def", "circTOcutout", "(", "wcs", ",", "ra", ",", "dec", ",", "rad", ")", ":", "(", "x1", ",", "y1", ")", "=", "wcs", ".", "rd2xy", "(", "(", "ra", "+", "rad", "/", "2.0", ",", "dec", "-", "rad", "/", "2.0", ")", ")", "(", "x2", ",", "y2...
Convert an RA/DEC/RADIUS to an imcopy cutout
[ "Convert", "an", "RA", "/", "DEC", "/", "RADIUS", "to", "an", "imcopy", "cutout" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/ephemSearch.py#L89-L114
OSSOS/MOP
src/jjk/preproc/ephemSearch.py
predict
def predict(abg,date,obs=568): """Run GB's predict using an ABG file as input.""" import orbfit import RO.StringUtil (ra,dec,a,b,ang) = orbfit.predict(abg,date,obs) obj['RA']=ra obj['DEC']=dec obj['dRA']=a obj['dDEC']=b obj['dANG']=ang return obj
python
def predict(abg,date,obs=568): """Run GB's predict using an ABG file as input.""" import orbfit import RO.StringUtil (ra,dec,a,b,ang) = orbfit.predict(abg,date,obs) obj['RA']=ra obj['DEC']=dec obj['dRA']=a obj['dDEC']=b obj['dANG']=ang return obj
[ "def", "predict", "(", "abg", ",", "date", ",", "obs", "=", "568", ")", ":", "import", "orbfit", "import", "RO", ".", "StringUtil", "(", "ra", ",", "dec", ",", "a", ",", "b", ",", "ang", ")", "=", "orbfit", ".", "predict", "(", "abg", ",", "dat...
Run GB's predict using an ABG file as input.
[ "Run", "GB", "s", "predict", "using", "an", "ABG", "file", "as", "input", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/ephemSearch.py#L264-L274
OSSOS/MOP
src/ossos/core/ossos/util.py
config_logging
def config_logging(level): """ Configure the logging given the level desired """ logger = logging.getLogger('') logger.setLevel(level) if level < logging.DEBUG: log_format = "%(asctime)s %(message)s" else: log_format = "%(asctime)s %(module)s : %(lineno)d %(message)s" sh = logging.StreamHandler() sh.formatter = logging.Formatter(fmt=log_format) logger.handlers = [] logger.addHandler(sh)
python
def config_logging(level): """ Configure the logging given the level desired """ logger = logging.getLogger('') logger.setLevel(level) if level < logging.DEBUG: log_format = "%(asctime)s %(message)s" else: log_format = "%(asctime)s %(module)s : %(lineno)d %(message)s" sh = logging.StreamHandler() sh.formatter = logging.Formatter(fmt=log_format) logger.handlers = [] logger.addHandler(sh)
[ "def", "config_logging", "(", "level", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "''", ")", "logger", ".", "setLevel", "(", "level", ")", "if", "level", "<", "logging", ".", "DEBUG", ":", "log_format", "=", "\"%(asctime)s %(message)s\"", "...
Configure the logging given the level desired
[ "Configure", "the", "logging", "given", "the", "level", "desired" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L27-L41
OSSOS/MOP
src/ossos/core/ossos/util.py
exec_prog
def exec_prog(args): """Run a subprocess, check for .OK and raise error if does not exist. args: list of arguments, for value is the command to execute. """ program_name = args[0] logging.info(" ".join(args)) output = subprocess.check_output(args, stderr=subprocess.STDOUT) if not os.access(program_name+".OK", os.F_OK): logging.error("No {}.OK file?".format(program_name)) raise subprocess.CalledProcessError(-1, ' '.join(args), output) os.unlink(program_name+".OK") if os.access(program_name+".FAILED", os.F_OK): os.unlink(program_name+".FAILED") return output
python
def exec_prog(args): """Run a subprocess, check for .OK and raise error if does not exist. args: list of arguments, for value is the command to execute. """ program_name = args[0] logging.info(" ".join(args)) output = subprocess.check_output(args, stderr=subprocess.STDOUT) if not os.access(program_name+".OK", os.F_OK): logging.error("No {}.OK file?".format(program_name)) raise subprocess.CalledProcessError(-1, ' '.join(args), output) os.unlink(program_name+".OK") if os.access(program_name+".FAILED", os.F_OK): os.unlink(program_name+".FAILED") return output
[ "def", "exec_prog", "(", "args", ")", ":", "program_name", "=", "args", "[", "0", "]", "logging", ".", "info", "(", "\" \"", ".", "join", "(", "args", ")", ")", "output", "=", "subprocess", ".", "check_output", "(", "args", ",", "stderr", "=", "subpr...
Run a subprocess, check for .OK and raise error if does not exist. args: list of arguments, for value is the command to execute.
[ "Run", "a", "subprocess", "check", "for", ".", "OK", "and", "raise", "error", "if", "does", "not", "exist", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L58-L73
OSSOS/MOP
src/ossos/core/ossos/util.py
get_pixel_bounds_from_datasec_keyword
def get_pixel_bounds_from_datasec_keyword(datasec): """ Return the x/y pixel boundaries of the data section. :param datasec: str e.g. '[33:2080,1:4612]' :return: ((xmin,xmax),(ymin,ymax)) """ datasec = re.findall(r'(\d+)', datasec) x1 = min(int(datasec[0]), int(datasec[1])) x2 = max(int(datasec[0]), int(datasec[1])) y1 = min(int(datasec[2]), int(datasec[3])) y2 = max(int(datasec[2]), int(datasec[3])) return (x1, x2), (y1, y2)
python
def get_pixel_bounds_from_datasec_keyword(datasec): """ Return the x/y pixel boundaries of the data section. :param datasec: str e.g. '[33:2080,1:4612]' :return: ((xmin,xmax),(ymin,ymax)) """ datasec = re.findall(r'(\d+)', datasec) x1 = min(int(datasec[0]), int(datasec[1])) x2 = max(int(datasec[0]), int(datasec[1])) y1 = min(int(datasec[2]), int(datasec[3])) y2 = max(int(datasec[2]), int(datasec[3])) return (x1, x2), (y1, y2)
[ "def", "get_pixel_bounds_from_datasec_keyword", "(", "datasec", ")", ":", "datasec", "=", "re", ".", "findall", "(", "r'(\\d+)'", ",", "datasec", ")", "x1", "=", "min", "(", "int", "(", "datasec", "[", "0", "]", ")", ",", "int", "(", "datasec", "[", "1...
Return the x/y pixel boundaries of the data section. :param datasec: str e.g. '[33:2080,1:4612]' :return: ((xmin,xmax),(ymin,ymax))
[ "Return", "the", "x", "/", "y", "pixel", "boundaries", "of", "the", "data", "section", ".", ":", "param", "datasec", ":", "str", "e", ".", "g", ".", "[", "33", ":", "2080", "1", ":", "4612", "]", ":", "return", ":", "((", "xmin", "xmax", ")", "...
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L133-L145
OSSOS/MOP
src/ossos/core/ossos/util.py
match_lists
def match_lists(pos1, pos2, tolerance=MATCH_TOLERANCE, spherical=False): """ Given two sets of x/y positions match the lists, uniquely. :rtype : numpy.ma, numpy.ma :param pos1: list of x/y positions. :param pos2: list of x/y positions. :param tolerance: float distance, in pixels, to consider a match Algorithm: - Find all the members of pos2 that are within tolerance of pos1[idx1]. These pos2 members are match_group_1 - Find all the members of pos1 that are within tolerance of match_group_1[idx2]. These pos1 members are match_group_2 - If pos1[idx] is in match_group_2 then pos1[idx] is a match of object at match_group_1[idx2] """ assert isinstance(pos1, numpy.ndarray) assert isinstance(pos2, numpy.ndarray) # build some arrays to hold the index of things that matched between lists. npts2 = npts1 = 0 if len(pos1) > 0: npts1 = len(pos1[:, 0]) pos1_idx_array = numpy.arange(npts1, dtype=numpy.int16) if len(pos2) > 0: npts2 = len(pos2[:, 0]) pos2_idx_array = numpy.arange(npts2, dtype=numpy.int16) # this is the array of final matched index, -1 indicates no match found. match1 = numpy.ma.zeros(npts1, dtype=numpy.int16) match1.mask = True # this is the array of matches in pos2, -1 indicates no match found. match2 = numpy.ma.zeros(npts2, dtype=numpy.int16) match2.mask = True # if one of the two input arrays are zero length then there is no matching to do. if npts1 * npts2 == 0: return match1, match2 for idx1 in range(npts1): # compute the distance source idx1 to each member of pos2 if not spherical : sep = numpy.sqrt((pos2[:, 0] - pos1[idx1, 0]) ** 2 + (pos2[:, 1] - pos1[idx1, 1]) ** 2) else: sep = numpy.sqrt((numpy.cos(numpy.radians(pos1[idx1,1]))*(pos2[:, 0] - pos1[idx1, 0])) ** 2 + (pos2[:, 1] - pos1[idx1, 1]) ** 2) # considered a match if sep is below tolerance and is the closest match available. 
match_condition = numpy.all((sep <= tolerance, sep == sep.min()), axis=0) # match_group_1 is list of the indexes of pos2 entries that qualified as possible matches to pos1[idx1] match_group_1 = pos2_idx_array[match_condition] # For each of those pos2 objects that could be a match to pos1[idx] find the best match in all of pos1 for idx2 in match_group_1: # compute the distance from this pos2 object that is a possible match to pos1[idx1] to all members of pos1 sep = numpy.sqrt((pos1[:, 0] - pos2[idx2, 0]) ** 2 + (pos1[:, 1] - pos2[idx2, 1]) ** 2) # considered a match if sep is below tolerance and is the closest match available. match_condition = numpy.all((sep <= tolerance, sep == sep.min()), axis=0) match_group_2 = pos1_idx_array[match_condition] # Are any of the pos1 members that were matches to the matched pos2 member the pos1[idx] entry? if idx1 in match_group_2: match1[idx1] = idx2 match2[idx2] = idx1 # this BREAK is in here since once we have a match we're done. break return match1, match2
python
def match_lists(pos1, pos2, tolerance=MATCH_TOLERANCE, spherical=False): """ Given two sets of x/y positions match the lists, uniquely. :rtype : numpy.ma, numpy.ma :param pos1: list of x/y positions. :param pos2: list of x/y positions. :param tolerance: float distance, in pixels, to consider a match Algorithm: - Find all the members of pos2 that are within tolerance of pos1[idx1]. These pos2 members are match_group_1 - Find all the members of pos1 that are within tolerance of match_group_1[idx2]. These pos1 members are match_group_2 - If pos1[idx] is in match_group_2 then pos1[idx] is a match of object at match_group_1[idx2] """ assert isinstance(pos1, numpy.ndarray) assert isinstance(pos2, numpy.ndarray) # build some arrays to hold the index of things that matched between lists. npts2 = npts1 = 0 if len(pos1) > 0: npts1 = len(pos1[:, 0]) pos1_idx_array = numpy.arange(npts1, dtype=numpy.int16) if len(pos2) > 0: npts2 = len(pos2[:, 0]) pos2_idx_array = numpy.arange(npts2, dtype=numpy.int16) # this is the array of final matched index, -1 indicates no match found. match1 = numpy.ma.zeros(npts1, dtype=numpy.int16) match1.mask = True # this is the array of matches in pos2, -1 indicates no match found. match2 = numpy.ma.zeros(npts2, dtype=numpy.int16) match2.mask = True # if one of the two input arrays are zero length then there is no matching to do. if npts1 * npts2 == 0: return match1, match2 for idx1 in range(npts1): # compute the distance source idx1 to each member of pos2 if not spherical : sep = numpy.sqrt((pos2[:, 0] - pos1[idx1, 0]) ** 2 + (pos2[:, 1] - pos1[idx1, 1]) ** 2) else: sep = numpy.sqrt((numpy.cos(numpy.radians(pos1[idx1,1]))*(pos2[:, 0] - pos1[idx1, 0])) ** 2 + (pos2[:, 1] - pos1[idx1, 1]) ** 2) # considered a match if sep is below tolerance and is the closest match available. 
match_condition = numpy.all((sep <= tolerance, sep == sep.min()), axis=0) # match_group_1 is list of the indexes of pos2 entries that qualified as possible matches to pos1[idx1] match_group_1 = pos2_idx_array[match_condition] # For each of those pos2 objects that could be a match to pos1[idx] find the best match in all of pos1 for idx2 in match_group_1: # compute the distance from this pos2 object that is a possible match to pos1[idx1] to all members of pos1 sep = numpy.sqrt((pos1[:, 0] - pos2[idx2, 0]) ** 2 + (pos1[:, 1] - pos2[idx2, 1]) ** 2) # considered a match if sep is below tolerance and is the closest match available. match_condition = numpy.all((sep <= tolerance, sep == sep.min()), axis=0) match_group_2 = pos1_idx_array[match_condition] # Are any of the pos1 members that were matches to the matched pos2 member the pos1[idx] entry? if idx1 in match_group_2: match1[idx1] = idx2 match2[idx2] = idx1 # this BREAK is in here since once we have a match we're done. break return match1, match2
[ "def", "match_lists", "(", "pos1", ",", "pos2", ",", "tolerance", "=", "MATCH_TOLERANCE", ",", "spherical", "=", "False", ")", ":", "assert", "isinstance", "(", "pos1", ",", "numpy", ".", "ndarray", ")", "assert", "isinstance", "(", "pos2", ",", "numpy", ...
Given two sets of x/y positions match the lists, uniquely. :rtype : numpy.ma, numpy.ma :param pos1: list of x/y positions. :param pos2: list of x/y positions. :param tolerance: float distance, in pixels, to consider a match Algorithm: - Find all the members of pos2 that are within tolerance of pos1[idx1]. These pos2 members are match_group_1 - Find all the members of pos1 that are within tolerance of match_group_1[idx2]. These pos1 members are match_group_2 - If pos1[idx] is in match_group_2 then pos1[idx] is a match of object at match_group_1[idx2]
[ "Given", "two", "sets", "of", "x", "/", "y", "positions", "match", "the", "lists", "uniquely", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L148-L224
OSSOS/MOP
src/ossos/core/ossos/util.py
VOFileHandler.stream
def stream(self): """ the stream to write the log content too. @return: """ if self._stream is None: self._stream = tempfile.NamedTemporaryFile(delete=False) try: self._stream.write(self.client.open(self.filename, view='data').read()) except: pass return self._stream
python
def stream(self): """ the stream to write the log content too. @return: """ if self._stream is None: self._stream = tempfile.NamedTemporaryFile(delete=False) try: self._stream.write(self.client.open(self.filename, view='data').read()) except: pass return self._stream
[ "def", "stream", "(", "self", ")", ":", "if", "self", ".", "_stream", "is", "None", ":", "self", ".", "_stream", "=", "tempfile", ".", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "try", ":", "self", ".", "_stream", ".", "write", "(", "sel...
the stream to write the log content too. @return:
[ "the", "stream", "to", "write", "the", "log", "content", "too", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L87-L98
OSSOS/MOP
src/ossos/core/ossos/util.py
VOFileHandler.client
def client(self): """ Send back the client we were sent, or construct a default one. @rtype vospace.client """ if self._client is not None: return self._client self._client = vospace.client return self._client
python
def client(self): """ Send back the client we were sent, or construct a default one. @rtype vospace.client """ if self._client is not None: return self._client self._client = vospace.client return self._client
[ "def", "client", "(", "self", ")", ":", "if", "self", ".", "_client", "is", "not", "None", ":", "return", "self", ".", "_client", "self", ".", "_client", "=", "vospace", ".", "client", "return", "self", ".", "_client" ]
Send back the client we were sent, or construct a default one. @rtype vospace.client
[ "Send", "back", "the", "client", "we", "were", "sent", "or", "construct", "a", "default", "one", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L101-L110
OSSOS/MOP
src/ossos/core/ossos/util.py
VOFileHandler.close
def close(self): """ Closes the stream. """ self.flush() try: if self.stream is not None: self.stream.flush() _name = self.stream.name self.stream.close() self.client.copy(_name, self.filename) except Exception as ex: print str(ex) pass
python
def close(self): """ Closes the stream. """ self.flush() try: if self.stream is not None: self.stream.flush() _name = self.stream.name self.stream.close() self.client.copy(_name, self.filename) except Exception as ex: print str(ex) pass
[ "def", "close", "(", "self", ")", ":", "self", ".", "flush", "(", ")", "try", ":", "if", "self", ".", "stream", "is", "not", "None", ":", "self", ".", "stream", ".", "flush", "(", ")", "_name", "=", "self", ".", "stream", ".", "name", "self", "...
Closes the stream.
[ "Closes", "the", "stream", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L112-L125
OSSOS/MOP
src/ossos/core/ossos/util.py
TimeMPC.parse_string
def parse_string(self, timestr, subfmts): """Read time from a single string, using a set of possible formats.""" # Datetime components required for conversion to JD by ERFA, along # with the default values. components = ('year', 'mon', 'mday') defaults = (None, 1, 1, 0) # Assume that anything following "." on the right side is a # floating fraction of a second. try: idot = timestr.rindex('.') except: fracday = 0.0 else: timestr, fracday = timestr[:idot], timestr[idot:] fracday = float(fracday) for _, strptime_fmt_or_regex, _ in subfmts: vals = [] #print strptime_fmt_or_regex if isinstance(strptime_fmt_or_regex, six.string_types): try: #print timstr #print strptime_fmt_or_regex tm = time.strptime(timestr, strptime_fmt_or_regex) tm.tm_hour += int(24 * fracday) tm.tm_min += int(60 * (24 * fracday - tm.tm_hour)) tm.tm_sec += 60 * (60 * (24 * fracday - tm.tm_hour) - tm.tm_min) except ValueError as ex: print ex continue else: vals = [getattr(tm, 'tm_' + component) for component in components] else: tm = re.match(strptime_fmt_or_regex, timestr) if tm is None: continue tm = tm.groupdict() vals = [int(tm.get(component, default)) for component, default in six.moves.zip(components, defaults)] hrprt = int(24 * fracday) vals.append(hrprt) mnprt = int(60 * (24 * fracday - hrprt)) vals.append(mnprt) scprt = 60 * (60 * (24 * fracday - hrprt) - mnprt) vals.append(scprt) return vals else: raise ValueError('Time {0} does not match {1} format' .format(timestr, self.name))
python
def parse_string(self, timestr, subfmts): """Read time from a single string, using a set of possible formats.""" # Datetime components required for conversion to JD by ERFA, along # with the default values. components = ('year', 'mon', 'mday') defaults = (None, 1, 1, 0) # Assume that anything following "." on the right side is a # floating fraction of a second. try: idot = timestr.rindex('.') except: fracday = 0.0 else: timestr, fracday = timestr[:idot], timestr[idot:] fracday = float(fracday) for _, strptime_fmt_or_regex, _ in subfmts: vals = [] #print strptime_fmt_or_regex if isinstance(strptime_fmt_or_regex, six.string_types): try: #print timstr #print strptime_fmt_or_regex tm = time.strptime(timestr, strptime_fmt_or_regex) tm.tm_hour += int(24 * fracday) tm.tm_min += int(60 * (24 * fracday - tm.tm_hour)) tm.tm_sec += 60 * (60 * (24 * fracday - tm.tm_hour) - tm.tm_min) except ValueError as ex: print ex continue else: vals = [getattr(tm, 'tm_' + component) for component in components] else: tm = re.match(strptime_fmt_or_regex, timestr) if tm is None: continue tm = tm.groupdict() vals = [int(tm.get(component, default)) for component, default in six.moves.zip(components, defaults)] hrprt = int(24 * fracday) vals.append(hrprt) mnprt = int(60 * (24 * fracday - hrprt)) vals.append(mnprt) scprt = 60 * (60 * (24 * fracday - hrprt) - mnprt) vals.append(scprt) return vals else: raise ValueError('Time {0} does not match {1} format' .format(timestr, self.name))
[ "def", "parse_string", "(", "self", ",", "timestr", ",", "subfmts", ")", ":", "# Datetime components required for conversion to JD by ERFA, along", "# with the default values.", "components", "=", "(", "'year'", ",", "'mon'", ",", "'mday'", ")", "defaults", "=", "(", ...
Read time from a single string, using a set of possible formats.
[ "Read", "time", "from", "a", "single", "string", "using", "a", "set", "of", "possible", "formats", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L258-L309
OSSOS/MOP
src/ossos/core/ossos/util.py
TimeMPC.str_kwargs
def str_kwargs(self): """ Generator that yields a dict of values corresponding to the calendar date and time for the internal JD values. """ iys, ims, ids, ihmsfs = d2dtf(self.scale.upper() .encode('utf8'), 6, self.jd1, self.jd2) # Get the str_fmt element of the first allowed output subformat _, _, str_fmt = self._select_subfmts(self.out_subfmt)[0] yday = None has_yday = '{yday:' in str_fmt or False ihrs = ihmsfs[..., 0] imins = ihmsfs[..., 1] isecs = ihmsfs[..., 2] ifracs = ihmsfs[..., 3] for iy, im, iday, ihr, imin, isec, ifracsec in numpy.nditer( [iys, ims, ids, ihrs, imins, isecs, ifracs]): if has_yday: yday = datetime(iy, im, iday).timetuple().tm_yday fracday = (((((ifracsec / 1000000.0 + isec) / 60.0 + imin) / 60.0) + ihr) / 24.0) * (10 ** 6) fracday = '{0:06g}'.format(fracday)[0:self.precision] yield {'year': int(iy), 'mon': int(im), 'day': int(iday), 'hour': int(ihr), 'min': int(imin), 'sec': int(isec), 'fracsec': int(ifracsec), 'yday': yday, 'fracday': fracday}
python
def str_kwargs(self): """ Generator that yields a dict of values corresponding to the calendar date and time for the internal JD values. """ iys, ims, ids, ihmsfs = d2dtf(self.scale.upper() .encode('utf8'), 6, self.jd1, self.jd2) # Get the str_fmt element of the first allowed output subformat _, _, str_fmt = self._select_subfmts(self.out_subfmt)[0] yday = None has_yday = '{yday:' in str_fmt or False ihrs = ihmsfs[..., 0] imins = ihmsfs[..., 1] isecs = ihmsfs[..., 2] ifracs = ihmsfs[..., 3] for iy, im, iday, ihr, imin, isec, ifracsec in numpy.nditer( [iys, ims, ids, ihrs, imins, isecs, ifracs]): if has_yday: yday = datetime(iy, im, iday).timetuple().tm_yday fracday = (((((ifracsec / 1000000.0 + isec) / 60.0 + imin) / 60.0) + ihr) / 24.0) * (10 ** 6) fracday = '{0:06g}'.format(fracday)[0:self.precision] yield {'year': int(iy), 'mon': int(im), 'day': int(iday), 'hour': int(ihr), 'min': int(imin), 'sec': int(isec), 'fracsec': int(ifracsec), 'yday': yday, 'fracday': fracday}
[ "def", "str_kwargs", "(", "self", ")", ":", "iys", ",", "ims", ",", "ids", ",", "ihmsfs", "=", "d2dtf", "(", "self", ".", "scale", ".", "upper", "(", ")", ".", "encode", "(", "'utf8'", ")", ",", "6", ",", "self", ".", "jd1", ",", "self", ".", ...
Generator that yields a dict of values corresponding to the calendar date and time for the internal JD values.
[ "Generator", "that", "yields", "a", "dict", "of", "values", "corresponding", "to", "the", "calendar", "date", "and", "time", "for", "the", "internal", "JD", "values", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/util.py#L311-L341
OSSOS/MOP
src/jjk/webCat/MOPdbaccess.py
_get_db_options
def _get_db_options(args): """Parse through a command line of arguments to over-ride the values in the users .dbrc file. If no user name is given then the environment variable $USERNAME is used. If $USERNAME is not defined then prompt for input. """ import optik, getpass,sys from optik import OptionParser parser=OptionParser() parser.add_option("-d","--database", action="store", type="string", dest="database", default="cfht", help="Name of the SYBASE database containing TABLE", metavar="FILE") parser.add_option("-u","--user", action="store", type="string", dest="user", default=getpass.getuser(), help="User name to access db with", metavar="USER") (opt, unused_args) = parser.parse_args(args) return opt.database,opt.user,unused_args
python
def _get_db_options(args): """Parse through a command line of arguments to over-ride the values in the users .dbrc file. If no user name is given then the environment variable $USERNAME is used. If $USERNAME is not defined then prompt for input. """ import optik, getpass,sys from optik import OptionParser parser=OptionParser() parser.add_option("-d","--database", action="store", type="string", dest="database", default="cfht", help="Name of the SYBASE database containing TABLE", metavar="FILE") parser.add_option("-u","--user", action="store", type="string", dest="user", default=getpass.getuser(), help="User name to access db with", metavar="USER") (opt, unused_args) = parser.parse_args(args) return opt.database,opt.user,unused_args
[ "def", "_get_db_options", "(", "args", ")", ":", "import", "optik", ",", "getpass", ",", "sys", "from", "optik", "import", "OptionParser", "parser", "=", "OptionParser", "(", ")", "parser", ".", "add_option", "(", "\"-d\"", ",", "\"--database\"", ",", "actio...
Parse through a command line of arguments to over-ride the values in the users .dbrc file. If no user name is given then the environment variable $USERNAME is used. If $USERNAME is not defined then prompt for input.
[ "Parse", "through", "a", "command", "line", "of", "arguments", "to", "over", "-", "ride", "the", "values", "in", "the", "users", ".", "dbrc", "file", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/webCat/MOPdbaccess.py#L33-L54
OSSOS/MOP
src/jjk/webCat/MOPdbaccess.py
_get_db_password
def _get_db_password(dbSystem,db,user): """Read through the users .dbrc file to get password for the db/user combination suplied. If no password is found then prompt for one """ import string, getpass, os dbrc = os.environ['HOME']+"/.dbrc" password={} if os.access(dbrc,os.R_OK): fd=open(dbrc) lines=fd.readlines() for line in lines: entry=line.split() if entry[0]==dbSystem and entry[1]==db and entry[2]==user: return entry[3] return getpass.getpass()
python
def _get_db_password(dbSystem,db,user): """Read through the users .dbrc file to get password for the db/user combination suplied. If no password is found then prompt for one """ import string, getpass, os dbrc = os.environ['HOME']+"/.dbrc" password={} if os.access(dbrc,os.R_OK): fd=open(dbrc) lines=fd.readlines() for line in lines: entry=line.split() if entry[0]==dbSystem and entry[1]==db and entry[2]==user: return entry[3] return getpass.getpass()
[ "def", "_get_db_password", "(", "dbSystem", ",", "db", ",", "user", ")", ":", "import", "string", ",", "getpass", ",", "os", "dbrc", "=", "os", ".", "environ", "[", "'HOME'", "]", "+", "\"/.dbrc\"", "password", "=", "{", "}", "if", "os", ".", "access...
Read through the users .dbrc file to get password for the db/user combination suplied. If no password is found then prompt for one
[ "Read", "through", "the", "users", ".", "dbrc", "file", "to", "get", "password", "for", "the", "db", "/", "user", "combination", "suplied", ".", "If", "no", "password", "is", "found", "then", "prompt", "for", "one" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/webCat/MOPdbaccess.py#L56-L71
OSSOS/MOP
src/jjk/webCat/MOPdbaccess.py
_get_db_connect
def _get_db_connect(dbSystem,db,user,password): """Create a connection to the database specified on the command line """ if dbSystem=='SYBASE': import Sybase try: dbh = Sybase.connect(dbSystem, user, password, database=db ) except: dbh=None elif dbSystem=='MYSQL': import MySQLdb try: dbh = MySQLdb.connect(user=user, passwd=password, db=db , host='gimli') except: dbh=None return dbh
python
def _get_db_connect(dbSystem,db,user,password): """Create a connection to the database specified on the command line """ if dbSystem=='SYBASE': import Sybase try: dbh = Sybase.connect(dbSystem, user, password, database=db ) except: dbh=None elif dbSystem=='MYSQL': import MySQLdb try: dbh = MySQLdb.connect(user=user, passwd=password, db=db , host='gimli') except: dbh=None return dbh
[ "def", "_get_db_connect", "(", "dbSystem", ",", "db", ",", "user", ",", "password", ")", ":", "if", "dbSystem", "==", "'SYBASE'", ":", "import", "Sybase", "try", ":", "dbh", "=", "Sybase", ".", "connect", "(", "dbSystem", ",", "user", ",", "password", ...
Create a connection to the database specified on the command line
[ "Create", "a", "connection", "to", "the", "database", "specified", "on", "the", "command", "line" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/webCat/MOPdbaccess.py#L73-L95
OSSOS/MOP
src/ossos/utils/search.py
caom2
def caom2(mpc_filename, search_date="2014 07 24.0"): """ builds a TSV file in the format of SSOIS by querying for possilbe observations in CADC/CAOM2. This is a fall back program, should only be useful when SSOIS is behind. """ columns = ('Image', 'Ext', 'X', 'Y', 'MJD', 'Filter', 'Exptime', 'Object_RA', 'Object_Dec', 'Image_target', 'Telescope/Instrument', 'MetaData', 'Datalink') ephem_table = Table(names=columns, dtypes=('S10', 'i4', 'f8', 'f8', 'f8', 'S10', 'f8', 'f8', 'f8', 'S20', 'S20', 'S20', 'S50')) ephem_table.pprint() o = orbfit.Orbfit(mpc.MPCReader(mpc_filename).mpc_observations) o.predict(search_date) fields = storage.cone_search(o.coordinate.ra.degrees, o.coordinate.dec.degrees, dra=0.3, ddec=0.3, calibration_level=1) mjdates = numpy.unique(fields['mjdate']) collectionIDs = [] for mjdate in mjdates: jd = 2400000.5 + mjdate o.predict(jd) for field in storage.cone_search(o.coordinate.ra.degrees, o.coordinate.dec.degrees, dra=30./3600.0, ddec=30./3600.0, mjdate=mjdate, calibration_level=1): collectionIDs.append(field['collectionID']) expnums = numpy.unique(numpy.array(collectionIDs)) for expnum in expnums: header = storage.get_astheader(expnum, 22) o.predict(header['MJDATE']+2400000.5) print o.time.iso, o.coordinate.ra.degrees, o.coordinate.dec.degrees for ccd in range(36): header = storage.get_astheader(expnum, ccd) w = wcs.WCS(header) (x, y) = w.sky2xy(o.coordinate.ra.degrees, o.coordinate.dec.degrees) print ccd, x, y if 0 < x < header['NAXIS1'] and 0 < y < header['NAXIS2']: ephem_table.add_row([expnum, ccd+1, x, y, header['MJDATE'], header['FILTER'], header['EXPTIME'], o.coordinate.ra.degrees, o.coordinate.dec.degrees, header['OBJECT'], 'CFHT/MegaCam', None, "http://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/data/pub/CFHT/{}p[{}]".format(expnum, ccd)]) break ephem_table.pprint() ephem_table.write('backdoor.tsv', format='ascii', delimiter='\t')
python
def caom2(mpc_filename, search_date="2014 07 24.0"): """ builds a TSV file in the format of SSOIS by querying for possilbe observations in CADC/CAOM2. This is a fall back program, should only be useful when SSOIS is behind. """ columns = ('Image', 'Ext', 'X', 'Y', 'MJD', 'Filter', 'Exptime', 'Object_RA', 'Object_Dec', 'Image_target', 'Telescope/Instrument', 'MetaData', 'Datalink') ephem_table = Table(names=columns, dtypes=('S10', 'i4', 'f8', 'f8', 'f8', 'S10', 'f8', 'f8', 'f8', 'S20', 'S20', 'S20', 'S50')) ephem_table.pprint() o = orbfit.Orbfit(mpc.MPCReader(mpc_filename).mpc_observations) o.predict(search_date) fields = storage.cone_search(o.coordinate.ra.degrees, o.coordinate.dec.degrees, dra=0.3, ddec=0.3, calibration_level=1) mjdates = numpy.unique(fields['mjdate']) collectionIDs = [] for mjdate in mjdates: jd = 2400000.5 + mjdate o.predict(jd) for field in storage.cone_search(o.coordinate.ra.degrees, o.coordinate.dec.degrees, dra=30./3600.0, ddec=30./3600.0, mjdate=mjdate, calibration_level=1): collectionIDs.append(field['collectionID']) expnums = numpy.unique(numpy.array(collectionIDs)) for expnum in expnums: header = storage.get_astheader(expnum, 22) o.predict(header['MJDATE']+2400000.5) print o.time.iso, o.coordinate.ra.degrees, o.coordinate.dec.degrees for ccd in range(36): header = storage.get_astheader(expnum, ccd) w = wcs.WCS(header) (x, y) = w.sky2xy(o.coordinate.ra.degrees, o.coordinate.dec.degrees) print ccd, x, y if 0 < x < header['NAXIS1'] and 0 < y < header['NAXIS2']: ephem_table.add_row([expnum, ccd+1, x, y, header['MJDATE'], header['FILTER'], header['EXPTIME'], o.coordinate.ra.degrees, o.coordinate.dec.degrees, header['OBJECT'], 'CFHT/MegaCam', None, "http://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/data/pub/CFHT/{}p[{}]".format(expnum, ccd)]) break ephem_table.pprint() ephem_table.write('backdoor.tsv', format='ascii', delimiter='\t')
[ "def", "caom2", "(", "mpc_filename", ",", "search_date", "=", "\"2014 07 24.0\"", ")", ":", "columns", "=", "(", "'Image'", ",", "'Ext'", ",", "'X'", ",", "'Y'", ",", "'MJD'", ",", "'Filter'", ",", "'Exptime'", ",", "'Object_RA'", ",", "'Object_Dec'", ",",...
builds a TSV file in the format of SSOIS by querying for possilbe observations in CADC/CAOM2. This is a fall back program, should only be useful when SSOIS is behind.
[ "builds", "a", "TSV", "file", "in", "the", "format", "of", "SSOIS", "by", "querying", "for", "possilbe", "observations", "in", "CADC", "/", "CAOM2", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/search.py#L10-L73
OSSOS/MOP
src/ossos/core/ossos/downloads/cutouts/downloader.py
ImageCutoutDownloader.download_cutout
def download_cutout(self, reading, focus=None, needs_apcor=False): """ Downloads a cutout of the FITS image for a given source reading. Args: reading: ossos.astrom.SourceReading The reading which will be the focus of the downloaded image. focus: tuple(int, int) The x, y coordinates that should be the focus of the downloaded image. These coordinates should be in terms of the source_reading parameter's coordinate system. Default value is None, in which case the source reading's x, y position is used as the focus. needs_apcor: bool If True, the apcor file with data needed for photometry calculations is downloaded in addition to the image. Defaults to False. Returns: cutout: ossos.downloads.data.SourceCutout """ logger.debug("Doing download_cutout with inputs: reading:{} focus:{} needs_apcor:{}".format(reading, focus, needs_apcor)) assert isinstance(reading, SourceReading) min_radius = config.read('CUTOUTS.SINGLETS.RADIUS') if not isinstance(min_radius, Quantity): min_radius = min_radius * units.arcsec radius = max(reading.uncertainty_ellipse.a, reading.uncertainty_ellipse.b) * 2.5 + min_radius logger.debug("got radius for cutout: {}".format(radius)) image_uri = reading.get_image_uri() logger.debug("Getting cutout at {} for {}".format(reading.reference_sky_coord, image_uri)) hdulist = storage._cutout_expnum(reading.obs, reading.reference_sky_coord, radius) # hdulist = storage.ra_dec_cutout(image_uri, reading.reference_sky_coord, radius) logger.debug("Getting the aperture correction.") source = SourceCutout(reading, hdulist, radius=radius) # Accessing the attribute here to trigger the download. try: apcor = source.apcor zmag = source.zmag source.reading.get_observation_header() except Exception as ex: if needs_apcor: import sys, traceback sys.stderr.write("Failed to retrieve apcor but apcor required. Raising error, see logs for more details") sys.stderr.write(traceback.print_exc()) pass logger.debug("Sending back the source reading.") return source
python
def download_cutout(self, reading, focus=None, needs_apcor=False): """ Downloads a cutout of the FITS image for a given source reading. Args: reading: ossos.astrom.SourceReading The reading which will be the focus of the downloaded image. focus: tuple(int, int) The x, y coordinates that should be the focus of the downloaded image. These coordinates should be in terms of the source_reading parameter's coordinate system. Default value is None, in which case the source reading's x, y position is used as the focus. needs_apcor: bool If True, the apcor file with data needed for photometry calculations is downloaded in addition to the image. Defaults to False. Returns: cutout: ossos.downloads.data.SourceCutout """ logger.debug("Doing download_cutout with inputs: reading:{} focus:{} needs_apcor:{}".format(reading, focus, needs_apcor)) assert isinstance(reading, SourceReading) min_radius = config.read('CUTOUTS.SINGLETS.RADIUS') if not isinstance(min_radius, Quantity): min_radius = min_radius * units.arcsec radius = max(reading.uncertainty_ellipse.a, reading.uncertainty_ellipse.b) * 2.5 + min_radius logger.debug("got radius for cutout: {}".format(radius)) image_uri = reading.get_image_uri() logger.debug("Getting cutout at {} for {}".format(reading.reference_sky_coord, image_uri)) hdulist = storage._cutout_expnum(reading.obs, reading.reference_sky_coord, radius) # hdulist = storage.ra_dec_cutout(image_uri, reading.reference_sky_coord, radius) logger.debug("Getting the aperture correction.") source = SourceCutout(reading, hdulist, radius=radius) # Accessing the attribute here to trigger the download. try: apcor = source.apcor zmag = source.zmag source.reading.get_observation_header() except Exception as ex: if needs_apcor: import sys, traceback sys.stderr.write("Failed to retrieve apcor but apcor required. Raising error, see logs for more details") sys.stderr.write(traceback.print_exc()) pass logger.debug("Sending back the source reading.") return source
[ "def", "download_cutout", "(", "self", ",", "reading", ",", "focus", "=", "None", ",", "needs_apcor", "=", "False", ")", ":", "logger", ".", "debug", "(", "\"Doing download_cutout with inputs: reading:{} focus:{} needs_apcor:{}\"", ".", "format", "(", "reading", ","...
Downloads a cutout of the FITS image for a given source reading. Args: reading: ossos.astrom.SourceReading The reading which will be the focus of the downloaded image. focus: tuple(int, int) The x, y coordinates that should be the focus of the downloaded image. These coordinates should be in terms of the source_reading parameter's coordinate system. Default value is None, in which case the source reading's x, y position is used as the focus. needs_apcor: bool If True, the apcor file with data needed for photometry calculations is downloaded in addition to the image. Defaults to False. Returns: cutout: ossos.downloads.data.SourceCutout
[ "Downloads", "a", "cutout", "of", "the", "FITS", "image", "for", "a", "given", "source", "reading", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/downloads/cutouts/downloader.py#L46-L99
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.load_objects
def load_objects(self, directory_name=None): """Load the targets from a file. """ # for name in Neptune: # self.kbos[name] = Neptune[name] if directory_name is not None: # defaults to looking at .ast files only if directory_name == parameters.REAL_KBO_AST_DIR: kbos = parsers.ossos_discoveries(all_objects=True, data_release=None) else: kbos = parsers.ossos_discoveries(directory_name, all_objects=False, data_release=None) for kbo in kbos: # if kbo.orbit.arc_length > 30.: # cull the short ones for now self.kbos[kbo.name] = kbo.orbit self.kbos[kbo.name].mag = kbo.mag # else: # print("Arc very short, large uncertainty. Skipping {} for now.\n".format(kbo.name)) self.doplot()
python
def load_objects(self, directory_name=None): """Load the targets from a file. """ # for name in Neptune: # self.kbos[name] = Neptune[name] if directory_name is not None: # defaults to looking at .ast files only if directory_name == parameters.REAL_KBO_AST_DIR: kbos = parsers.ossos_discoveries(all_objects=True, data_release=None) else: kbos = parsers.ossos_discoveries(directory_name, all_objects=False, data_release=None) for kbo in kbos: # if kbo.orbit.arc_length > 30.: # cull the short ones for now self.kbos[kbo.name] = kbo.orbit self.kbos[kbo.name].mag = kbo.mag # else: # print("Arc very short, large uncertainty. Skipping {} for now.\n".format(kbo.name)) self.doplot()
[ "def", "load_objects", "(", "self", ",", "directory_name", "=", "None", ")", ":", "# for name in Neptune:", "# self.kbos[name] = Neptune[name]", "if", "directory_name", "is", "not", "None", ":", "# defaults to looking at .ast files only", "if", "directory_name", "==", ...
Load the targets from a file.
[ "Load", "the", "targets", "from", "a", "file", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L219-L240
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.p2c
def p2c(self, p=None): """convert from plot to canvas coordinates. See also c2p.""" if p is None: p = [0, 0] x = (p[0] - self.x1) * self.xscale + self.cx1 y = (p[1] - self.y1) * self.yscale + self.cy1 # logging.debug("p2c: ({},{}) -> ({},{})".format(p[0],p[1], x, y)) return (x, y)
python
def p2c(self, p=None): """convert from plot to canvas coordinates. See also c2p.""" if p is None: p = [0, 0] x = (p[0] - self.x1) * self.xscale + self.cx1 y = (p[1] - self.y1) * self.yscale + self.cy1 # logging.debug("p2c: ({},{}) -> ({},{})".format(p[0],p[1], x, y)) return (x, y)
[ "def", "p2c", "(", "self", ",", "p", "=", "None", ")", ":", "if", "p", "is", "None", ":", "p", "=", "[", "0", ",", "0", "]", "x", "=", "(", "p", "[", "0", "]", "-", "self", ".", "x1", ")", "*", "self", ".", "xscale", "+", "self", ".", ...
convert from plot to canvas coordinates. See also c2p.
[ "convert", "from", "plot", "to", "canvas", "coordinates", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L266-L276
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.p2s
def p2s(self, p=None): """Convert from plot to screen coordinates""" if not p: p = [0, 0] s = self.p2c(p) return self.c2s(s)
python
def p2s(self, p=None): """Convert from plot to screen coordinates""" if not p: p = [0, 0] s = self.p2c(p) return self.c2s(s)
[ "def", "p2s", "(", "self", ",", "p", "=", "None", ")", ":", "if", "not", "p", ":", "p", "=", "[", "0", ",", "0", "]", "s", "=", "self", ".", "p2c", "(", "p", ")", "return", "self", ".", "c2s", "(", "s", ")" ]
Convert from plot to screen coordinates
[ "Convert", "from", "plot", "to", "screen", "coordinates" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L278-L283
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.c2s
def c2s(self, p=None): """Convert from canvas to screen coordinates""" if not p: p = [0, 0] return p[0] - self.canvasx(self.cx1), p[1] - self.canvasy(self.cy2)
python
def c2s(self, p=None): """Convert from canvas to screen coordinates""" if not p: p = [0, 0] return p[0] - self.canvasx(self.cx1), p[1] - self.canvasy(self.cy2)
[ "def", "c2s", "(", "self", ",", "p", "=", "None", ")", ":", "if", "not", "p", ":", "p", "=", "[", "0", ",", "0", "]", "return", "p", "[", "0", "]", "-", "self", ".", "canvasx", "(", "self", ".", "cx1", ")", ",", "p", "[", "1", "]", "-",...
Convert from canvas to screen coordinates
[ "Convert", "from", "canvas", "to", "screen", "coordinates" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L285-L290
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.centre
def centre(self): """Return the RA/DEC of the center of the visible Canvas.""" return self.p2s(self.canvasx(self.width / 2.0)), self.p2s(self.canvasy(self.height / 2.0))
python
def centre(self): """Return the RA/DEC of the center of the visible Canvas.""" return self.p2s(self.canvasx(self.width / 2.0)), self.p2s(self.canvasy(self.height / 2.0))
[ "def", "centre", "(", "self", ")", ":", "return", "self", ".", "p2s", "(", "self", ".", "canvasx", "(", "self", ".", "width", "/", "2.0", ")", ")", ",", "self", ".", "p2s", "(", "self", ".", "canvasy", "(", "self", ".", "height", "/", "2.0", ")...
Return the RA/DEC of the center of the visible Canvas.
[ "Return", "the", "RA", "/", "DEC", "of", "the", "center", "of", "the", "visible", "Canvas", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L303-L305
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.coord_grid
def coord_grid(self): """Draw a grid of RA/DEC on Canvas.""" ra2 = math.pi * 2 ra1 = 0 dec1 = -1 * math.pi / 2.0 dec2 = math.pi / 2.0 dra = math.fabs((self.x2 - self.x1) / 4.0) ddec = math.fabs((self.y2 - self.y1) / 4.0) logging.debug("Drawing the grid.") label = True ra = ra1 while ra <= ra2: # create a line that goes from dec1 to dec2 at this ra (cx1, cy1) = self.p2c((ra, dec1)) # ly is the y location mid-way between the top and bottom of the plot (cx2, cy2) = self.p2c((ra, dec2)) # create a dashed line for un-labeled lines self.create_line(cx1, cy1, cx2, cy2, dash=(20, 20), fill="grey") dec = dec1 while dec <= dec2: if label: # lx is the the x screen coordinate location of the ra (lx, ly) = self.p2c((ra, dec + ddec / 2.0)) self.create_text(lx + 5, ly, justify=LEFT, font=('Times', '-20'), text="\n".join(str(ephem.hours(ra))[:-3]), fill='grey') label = not label dec += ddec ra += dra # plot dec grid lines bewtween South (dec1) and North (dec2) limits of plot dec = dec1 # should we label the current grid line? label = True while dec <= dec2: # create a line the goes from ra1 to ra2 at this dec (cx1, cy1) = self.p2c((ra1, dec)) (cx2, cy2) = self.p2c((ra2, dec)) self.create_line(cx1, cy1, cx2, cy2, dash=(20, 20), fill='grey') ra = ra1 while ra <= ra2: if label: # lx/ly are the screen coordinates of the label (lx, ly) = self.p2c((ra + dra / 2.0, dec)) self.create_text(lx, ly - 5, font=('Times', '-20'), text=str(ephem.degrees(dec)), fill='grey') ra += dra dec += ddec label = not label
python
def coord_grid(self): """Draw a grid of RA/DEC on Canvas.""" ra2 = math.pi * 2 ra1 = 0 dec1 = -1 * math.pi / 2.0 dec2 = math.pi / 2.0 dra = math.fabs((self.x2 - self.x1) / 4.0) ddec = math.fabs((self.y2 - self.y1) / 4.0) logging.debug("Drawing the grid.") label = True ra = ra1 while ra <= ra2: # create a line that goes from dec1 to dec2 at this ra (cx1, cy1) = self.p2c((ra, dec1)) # ly is the y location mid-way between the top and bottom of the plot (cx2, cy2) = self.p2c((ra, dec2)) # create a dashed line for un-labeled lines self.create_line(cx1, cy1, cx2, cy2, dash=(20, 20), fill="grey") dec = dec1 while dec <= dec2: if label: # lx is the the x screen coordinate location of the ra (lx, ly) = self.p2c((ra, dec + ddec / 2.0)) self.create_text(lx + 5, ly, justify=LEFT, font=('Times', '-20'), text="\n".join(str(ephem.hours(ra))[:-3]), fill='grey') label = not label dec += ddec ra += dra # plot dec grid lines bewtween South (dec1) and North (dec2) limits of plot dec = dec1 # should we label the current grid line? label = True while dec <= dec2: # create a line the goes from ra1 to ra2 at this dec (cx1, cy1) = self.p2c((ra1, dec)) (cx2, cy2) = self.p2c((ra2, dec)) self.create_line(cx1, cy1, cx2, cy2, dash=(20, 20), fill='grey') ra = ra1 while ra <= ra2: if label: # lx/ly are the screen coordinates of the label (lx, ly) = self.p2c((ra + dra / 2.0, dec)) self.create_text(lx, ly - 5, font=('Times', '-20'), text=str(ephem.degrees(dec)), fill='grey') ra += dra dec += ddec label = not label
[ "def", "coord_grid", "(", "self", ")", ":", "ra2", "=", "math", ".", "pi", "*", "2", "ra1", "=", "0", "dec1", "=", "-", "1", "*", "math", ".", "pi", "/", "2.0", "dec2", "=", "math", ".", "pi", "/", "2.0", "dra", "=", "math", ".", "fabs", "(...
Draw a grid of RA/DEC on Canvas.
[ "Draw", "a", "grid", "of", "RA", "/", "DEC", "on", "Canvas", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L327-L377
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.tickmark
def tickmark(self, x, y, size=10, orientation=90): """Draw a line of size and orientation at x,y""" (x1, y1) = self.p2c([x, y]) x2 = x1 + size * math.cos(math.radians(orientation)) y2 = y1 - size * math.sin(math.radians(orientation)) self.create_line(x1, y1, x2, y2)
python
def tickmark(self, x, y, size=10, orientation=90): """Draw a line of size and orientation at x,y""" (x1, y1) = self.p2c([x, y]) x2 = x1 + size * math.cos(math.radians(orientation)) y2 = y1 - size * math.sin(math.radians(orientation)) self.create_line(x1, y1, x2, y2)
[ "def", "tickmark", "(", "self", ",", "x", ",", "y", ",", "size", "=", "10", ",", "orientation", "=", "90", ")", ":", "(", "x1", ",", "y1", ")", "=", "self", ".", "p2c", "(", "[", "x", ",", "y", "]", ")", "x2", "=", "x1", "+", "size", "*",...
Draw a line of size and orientation at x,y
[ "Draw", "a", "line", "of", "size", "and", "orientation", "at", "x", "y" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L425-L431
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.limits
def limits(self, x1, x2, y1, y2): """Set the coordinate boundaries of plot""" self.x1 = x1 self.x2 = x2 self.y1 = y1 self.y2 = y2 self.xscale = (self.cx2 - self.cx1) / (self.x2 - self.x1) self.yscale = (self.cy2 - self.cy1) / (self.y2 - self.y1) # Determine the limits of the canvas (cx1, cy1) = self.p2c((0, -math.pi / 2.0)) (cx2, cy2) = self.p2c((2 * math.pi, math.pi / 2.0)) # # set the scroll region to the size of the camvas plus a boundary to allow the canvas edge to be at centre self.config(scrollregion=( cx2 - self.width / 2.0, cy2 - self.height / 2.0, cx1 + self.width / 2.0, cy1 + self.height / 2.0))
python
def limits(self, x1, x2, y1, y2): """Set the coordinate boundaries of plot""" self.x1 = x1 self.x2 = x2 self.y1 = y1 self.y2 = y2 self.xscale = (self.cx2 - self.cx1) / (self.x2 - self.x1) self.yscale = (self.cy2 - self.cy1) / (self.y2 - self.y1) # Determine the limits of the canvas (cx1, cy1) = self.p2c((0, -math.pi / 2.0)) (cx2, cy2) = self.p2c((2 * math.pi, math.pi / 2.0)) # # set the scroll region to the size of the camvas plus a boundary to allow the canvas edge to be at centre self.config(scrollregion=( cx2 - self.width / 2.0, cy2 - self.height / 2.0, cx1 + self.width / 2.0, cy1 + self.height / 2.0))
[ "def", "limits", "(", "self", ",", "x1", ",", "x2", ",", "y1", ",", "y2", ")", ":", "self", ".", "x1", "=", "x1", "self", ".", "x2", "=", "x2", "self", ".", "y1", "=", "y1", "self", ".", "y2", "=", "y2", "self", ".", "xscale", "=", "(", "...
Set the coordinate boundaries of plot
[ "Set", "the", "coordinate", "boundaries", "of", "plot" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L439-L454
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.reset
def reset(self): """Expand to the full scale""" sun = ephem.Sun() this_time = Time(self.date.get(), scale='utc') sun.compute(this_time.iso) self.sun = Coord((sun.ra, sun.dec)) self.doplot() self.plot_pointings()
python
def reset(self): """Expand to the full scale""" sun = ephem.Sun() this_time = Time(self.date.get(), scale='utc') sun.compute(this_time.iso) self.sun = Coord((sun.ra, sun.dec)) self.doplot() self.plot_pointings()
[ "def", "reset", "(", "self", ")", ":", "sun", "=", "ephem", ".", "Sun", "(", ")", "this_time", "=", "Time", "(", "self", ".", "date", ".", "get", "(", ")", ",", "scale", "=", "'utc'", ")", "sun", ".", "compute", "(", "this_time", ".", "iso", ")...
Expand to the full scale
[ "Expand", "to", "the", "full", "scale" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L456-L465
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.updateObj
def updateObj(self, event): """Put this object in the search box""" name = self.objList.get("active") self.SearchVar.set(name) self.object_info.set(str(self.kbos.get(name, ''))) return
python
def updateObj(self, event): """Put this object in the search box""" name = self.objList.get("active") self.SearchVar.set(name) self.object_info.set(str(self.kbos.get(name, ''))) return
[ "def", "updateObj", "(", "self", ",", "event", ")", ":", "name", "=", "self", ".", "objList", ".", "get", "(", "\"active\"", ")", "self", ".", "SearchVar", ".", "set", "(", "name", ")", "self", ".", "object_info", ".", "set", "(", "str", "(", "self...
Put this object in the search box
[ "Put", "this", "object", "in", "the", "search", "box" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L467-L473
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.relocate
def relocate(self): """Move to the position of self.SearchVar""" name = self.SearchVar.get() if self.kbos.has_key(name): kbo = self.kbos[name] assert isinstance(kbo, orbfit.Orbfit) this_time = Time(self.date.get(), scale='utc') try: kbo.predict(this_time) self.recenter(kbo.coordinate.ra.radian, kbo.coordinate.dec.radian) self.create_point(kbo.coordinate.ra.radian, kbo.coordinate.dec.radian, color='blue', size=4) except: logging.error("failed to compute KBO position")
python
def relocate(self): """Move to the position of self.SearchVar""" name = self.SearchVar.get() if self.kbos.has_key(name): kbo = self.kbos[name] assert isinstance(kbo, orbfit.Orbfit) this_time = Time(self.date.get(), scale='utc') try: kbo.predict(this_time) self.recenter(kbo.coordinate.ra.radian, kbo.coordinate.dec.radian) self.create_point(kbo.coordinate.ra.radian, kbo.coordinate.dec.radian, color='blue', size=4) except: logging.error("failed to compute KBO position")
[ "def", "relocate", "(", "self", ")", ":", "name", "=", "self", ".", "SearchVar", ".", "get", "(", ")", "if", "self", ".", "kbos", ".", "has_key", "(", "name", ")", ":", "kbo", "=", "self", ".", "kbos", "[", "name", "]", "assert", "isinstance", "(...
Move to the position of self.SearchVar
[ "Move", "to", "the", "position", "of", "self", ".", "SearchVar" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L475-L488
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.zoom
def zoom(self, event=None, scale=2.0): """Zoom in""" # compute the x,y of the center of the screen sx1 = (self.cx1 + self.cx2) / 2.0 sy1 = (self.cy1 + self.cy2) / 2.0 if not event is None: logging.debug("EVENT: {},{}".format(event.x, event.y)) sx1 = event.x sy1 = event.y (x, y) = self.c2p((self.canvasx(sx1), self.canvasy(sy1))) xw = (self.x2 - self.x1) / 2.0 / scale yw = (self.y2 - self.y1) / 2.0 / scale # reset the limits to be centered at x,y with # area of xw*2,y2*2 self.limits(x - xw, x + xw, y - yw, y + yw) self.delete(ALL) self.doplot()
python
def zoom(self, event=None, scale=2.0): """Zoom in""" # compute the x,y of the center of the screen sx1 = (self.cx1 + self.cx2) / 2.0 sy1 = (self.cy1 + self.cy2) / 2.0 if not event is None: logging.debug("EVENT: {},{}".format(event.x, event.y)) sx1 = event.x sy1 = event.y (x, y) = self.c2p((self.canvasx(sx1), self.canvasy(sy1))) xw = (self.x2 - self.x1) / 2.0 / scale yw = (self.y2 - self.y1) / 2.0 / scale # reset the limits to be centered at x,y with # area of xw*2,y2*2 self.limits(x - xw, x + xw, y - yw, y + yw) self.delete(ALL) self.doplot()
[ "def", "zoom", "(", "self", ",", "event", "=", "None", ",", "scale", "=", "2.0", ")", ":", "# compute the x,y of the center of the screen", "sx1", "=", "(", "self", ".", "cx1", "+", "self", ".", "cx2", ")", "/", "2.0", "sy1", "=", "(", "self", ".", "...
Zoom in
[ "Zoom", "in" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L514-L535
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.create_point
def create_point(self, xcen, ycen, size=10, color='red', fill=None): """Plot a circle of size at this x,y location""" if fill is None: fill = color (x, y) = self.p2c((xcen, ycen)) x1 = x - size x2 = x + size y1 = y - size y2 = y + size self.create_rectangle(x1, y1, x2, y2, fill=fill, outline=color)
python
def create_point(self, xcen, ycen, size=10, color='red', fill=None): """Plot a circle of size at this x,y location""" if fill is None: fill = color (x, y) = self.p2c((xcen, ycen)) x1 = x - size x2 = x + size y1 = y - size y2 = y + size self.create_rectangle(x1, y1, x2, y2, fill=fill, outline=color)
[ "def", "create_point", "(", "self", ",", "xcen", ",", "ycen", ",", "size", "=", "10", ",", "color", "=", "'red'", ",", "fill", "=", "None", ")", ":", "if", "fill", "is", "None", ":", "fill", "=", "color", "(", "x", ",", "y", ")", "=", "self", ...
Plot a circle of size at this x,y location
[ "Plot", "a", "circle", "of", "size", "at", "this", "x", "y", "location" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L559-L570
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.current_pointing
def current_pointing(self, index): """set the color of the currently selected pointing to 'blue'""" if self.current is not None: for item in self.pointings[self.current]['items']: self.itemconfigure(item, outline="black") self.current = index for item in self.pointings[self.current]['items']: self.itemconfigure(item, outline="blue")
python
def current_pointing(self, index): """set the color of the currently selected pointing to 'blue'""" if self.current is not None: for item in self.pointings[self.current]['items']: self.itemconfigure(item, outline="black") self.current = index for item in self.pointings[self.current]['items']: self.itemconfigure(item, outline="blue")
[ "def", "current_pointing", "(", "self", ",", "index", ")", ":", "if", "self", ".", "current", "is", "not", "None", ":", "for", "item", "in", "self", ".", "pointings", "[", "self", ".", "current", "]", "[", "'items'", "]", ":", "self", ".", "itemconfi...
set the color of the currently selected pointing to 'blue
[ "set", "the", "color", "of", "the", "currently", "selected", "pointing", "to", "blue" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L572-L579
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.delete_pointing
def delete_pointing(self, event): """Delete the currently active pointing""" if self.current is None: return for item in self.pointings[self.current]['items']: self.delete(item) self.delete(self.pointings[self.current]['label']['id']) del (self.pointings[self.current]) self.current = None
python
def delete_pointing(self, event): """Delete the currently active pointing""" if self.current is None: return for item in self.pointings[self.current]['items']: self.delete(item) self.delete(self.pointings[self.current]['label']['id']) del (self.pointings[self.current]) self.current = None
[ "def", "delete_pointing", "(", "self", ",", "event", ")", ":", "if", "self", ".", "current", "is", "None", ":", "return", "for", "item", "in", "self", ".", "pointings", "[", "self", ".", "current", "]", "[", "'items'", "]", ":", "self", ".", "delete"...
Delete the currently active pointing
[ "Delete", "the", "currently", "active", "pointing" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L581-L591
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.load_pointings
def load_pointings(self, filename=None): """Load some pointings""" filename = ( filename is None and tkFileDialog.askopenfilename() or filename) if filename is None: return f = storage.open_vos_or_local(filename) lines = f.readlines() f.close() points = [] if lines[0][0:5] == "<?xml": # ## assume astrores format # ## with <DATA at start of 'data' segment for i in range(len(lines)): if lines[i][0:5] == '<DATA': break for j in range(i + 5, len(lines)): if lines[j][0:2] == "]]": break vs = lines[j].split('|') points.append(vs) elif lines[0][0:5] == 'index': # ## Palomar Format # ## OK.. ID/NAME/RA /DEC format v = lines[0].split() if len(v) == 2 : date = v[1] self.date.set(v[1]) self.reset() for line in lines: if line[0] == '!' or line[0:5] == 'index': # index is a header line for Palomar continue d = line.split() if len(d) < 9: sys.stderr.write("Don't understand pointing format\n%s\n" % line) continue ras = "%s:%s:%s" % (d[2], d[3], d[4]) decs = "%s:%s:%s" % (d[5], d[6], d[7]) points.append((d[1].strip(), ras, decs)) elif lines[0][0:5] == "#SSIM": # ## Survey Simulator format for line in lines[1:]: d = line.split() points.append((d[8], d[2], d[3])) else: # ## try name/ ra /dec / epoch for line in lines: d = line.split() if len(d) == 5: # brave assumption time! # self.pointing_format = 'Subaru' # unfortunately this doesn't seem to do anything, & breaks save pointing_name = d[0].split('=')[0] # oh grief these are sexagecimal with no separators. 
WHY ra = d[1].split('=')[1] dec = d[2].split('=')[1] if len(ra.split('.')[0]) == 5: # LACK OF SEPARATORS ARGH ra = '0' + ra if len(dec.split('.')[0]) == 5: dec = '0' + dec ra = "{}:{}:{}".format(ra[0:2], ra[2:4], ra[4:]) dec = "{}:{}:{}".format(dec[0:2], dec[2:4], dec[4:]) points.append((pointing_name, ra, dec)) elif len(d) == 4: f = d[1].count(":") if ( f > 0 ): points.append((d[0], d[1], d[2])) else: points.append(('', math.radians(float(d[1])), math.radians(float(d[2])))) elif len(d) == 8: line = "%s %s:%s:%s %s:%s:%s %s" % (d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7] ) d = line.split() # this one seems unfinished...no append else: sys.stderr.write("Don't understand pointing format\n%s\n" % ( line)) continue self.plot_points_list(points) return
python
def load_pointings(self, filename=None): """Load some pointings""" filename = ( filename is None and tkFileDialog.askopenfilename() or filename) if filename is None: return f = storage.open_vos_or_local(filename) lines = f.readlines() f.close() points = [] if lines[0][0:5] == "<?xml": # ## assume astrores format # ## with <DATA at start of 'data' segment for i in range(len(lines)): if lines[i][0:5] == '<DATA': break for j in range(i + 5, len(lines)): if lines[j][0:2] == "]]": break vs = lines[j].split('|') points.append(vs) elif lines[0][0:5] == 'index': # ## Palomar Format # ## OK.. ID/NAME/RA /DEC format v = lines[0].split() if len(v) == 2 : date = v[1] self.date.set(v[1]) self.reset() for line in lines: if line[0] == '!' or line[0:5] == 'index': # index is a header line for Palomar continue d = line.split() if len(d) < 9: sys.stderr.write("Don't understand pointing format\n%s\n" % line) continue ras = "%s:%s:%s" % (d[2], d[3], d[4]) decs = "%s:%s:%s" % (d[5], d[6], d[7]) points.append((d[1].strip(), ras, decs)) elif lines[0][0:5] == "#SSIM": # ## Survey Simulator format for line in lines[1:]: d = line.split() points.append((d[8], d[2], d[3])) else: # ## try name/ ra /dec / epoch for line in lines: d = line.split() if len(d) == 5: # brave assumption time! # self.pointing_format = 'Subaru' # unfortunately this doesn't seem to do anything, & breaks save pointing_name = d[0].split('=')[0] # oh grief these are sexagecimal with no separators. 
WHY ra = d[1].split('=')[1] dec = d[2].split('=')[1] if len(ra.split('.')[0]) == 5: # LACK OF SEPARATORS ARGH ra = '0' + ra if len(dec.split('.')[0]) == 5: dec = '0' + dec ra = "{}:{}:{}".format(ra[0:2], ra[2:4], ra[4:]) dec = "{}:{}:{}".format(dec[0:2], dec[2:4], dec[4:]) points.append((pointing_name, ra, dec)) elif len(d) == 4: f = d[1].count(":") if ( f > 0 ): points.append((d[0], d[1], d[2])) else: points.append(('', math.radians(float(d[1])), math.radians(float(d[2])))) elif len(d) == 8: line = "%s %s:%s:%s %s:%s:%s %s" % (d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7] ) d = line.split() # this one seems unfinished...no append else: sys.stderr.write("Don't understand pointing format\n%s\n" % ( line)) continue self.plot_points_list(points) return
[ "def", "load_pointings", "(", "self", ",", "filename", "=", "None", ")", ":", "filename", "=", "(", "filename", "is", "None", "and", "tkFileDialog", ".", "askopenfilename", "(", ")", "or", "filename", ")", "if", "filename", "is", "None", ":", "return", "...
Load some pointings
[ "Load", "some", "pointings" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L593-L672
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.create_pointing
def create_pointing(self, event, label_text=None): """Plot the sky coverage of pointing at event.x,event.y on the canvas. """ x = self.canvasx(event.x) y = self.canvasy(event.y) (ra, dec) = self.c2p((x, y)) this_camera = Camera(ra=float(ra) * units.radian, dec=float(dec)*units.radian, camera=self.camera.get()) ccds = numpy.radians(numpy.array(this_camera.geometry)) items = [] for ccd in ccds: if len(ccd) == 4: (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) item = self.create_rectangle(x1, y1, x2, y2, stipple='gray25', fill=None) else: (x1, y1) = self.p2c((ccd[0] - ccd[2], ccd[1] - ccd[2])) (x2, y2) = self.p2c((ccd[0] + ccd[2], ccd[1] + ccd[2])) item = self.create_oval(x1, y1, x2, y2) items.append(item) label = {} if label_text is None: label_text = self.plabel.get() label['text'] = label_text label['id'] = self.label(this_camera.ra.radian, this_camera.dec.radian, label['text']) self.pointings.append({ "label": label, "items": items, "camera": this_camera}) self.current = len(self.pointings) - 1 self.current_pointing(len(self.pointings) - 1)
python
def create_pointing(self, event, label_text=None): """Plot the sky coverage of pointing at event.x,event.y on the canvas. """ x = self.canvasx(event.x) y = self.canvasy(event.y) (ra, dec) = self.c2p((x, y)) this_camera = Camera(ra=float(ra) * units.radian, dec=float(dec)*units.radian, camera=self.camera.get()) ccds = numpy.radians(numpy.array(this_camera.geometry)) items = [] for ccd in ccds: if len(ccd) == 4: (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) item = self.create_rectangle(x1, y1, x2, y2, stipple='gray25', fill=None) else: (x1, y1) = self.p2c((ccd[0] - ccd[2], ccd[1] - ccd[2])) (x2, y2) = self.p2c((ccd[0] + ccd[2], ccd[1] + ccd[2])) item = self.create_oval(x1, y1, x2, y2) items.append(item) label = {} if label_text is None: label_text = self.plabel.get() label['text'] = label_text label['id'] = self.label(this_camera.ra.radian, this_camera.dec.radian, label['text']) self.pointings.append({ "label": label, "items": items, "camera": this_camera}) self.current = len(self.pointings) - 1 self.current_pointing(len(self.pointings) - 1)
[ "def", "create_pointing", "(", "self", ",", "event", ",", "label_text", "=", "None", ")", ":", "x", "=", "self", ".", "canvasx", "(", "event", ".", "x", ")", "y", "=", "self", ".", "canvasy", "(", "event", ".", "y", ")", "(", "ra", ",", "dec", ...
Plot the sky coverage of pointing at event.x,event.y on the canvas.
[ "Plot", "the", "sky", "coverage", "of", "pointing", "at", "event", ".", "x", "event", ".", "y", "on", "the", "canvas", "." ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L715-L746
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.plot_pointings
def plot_pointings(self, pointings=None): """Plot pointings on canavs""" if pointings is None: pointings = self.pointings i = 0 for pointing in pointings: items = [] i = i + 1 label = {} label['text'] = pointing['label']['text'] for ccd in numpy.radians(pointing["camera"].geometry): if len(ccd) == 4: ccd = numpy.radians(numpy.array(ccd)) (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) item = self.create_rectangle(x1, y1, x2, y2, stipple='gray25', fill=pointing.get('color', '')) else: (x1, y1) = self.p2c((ccd[0] - ccd[2]), ccd[1] - ccd[2]) (x2, y2) = self.p2c((ccd[0] + ccd[2]), ccd[1] + ccd[2]) item = self.create_oval(x1, y1, x2, y2) items.append(item) if self.show_labels.get() == 1: label['id'] = self.label(pointing["camera"].ra.radian, pointing["camera"].dec.radian, label['text']) pointing["items"] = items pointing["label"] = label
python
def plot_pointings(self, pointings=None): """Plot pointings on canavs""" if pointings is None: pointings = self.pointings i = 0 for pointing in pointings: items = [] i = i + 1 label = {} label['text'] = pointing['label']['text'] for ccd in numpy.radians(pointing["camera"].geometry): if len(ccd) == 4: ccd = numpy.radians(numpy.array(ccd)) (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) item = self.create_rectangle(x1, y1, x2, y2, stipple='gray25', fill=pointing.get('color', '')) else: (x1, y1) = self.p2c((ccd[0] - ccd[2]), ccd[1] - ccd[2]) (x2, y2) = self.p2c((ccd[0] + ccd[2]), ccd[1] + ccd[2]) item = self.create_oval(x1, y1, x2, y2) items.append(item) if self.show_labels.get() == 1: label['id'] = self.label(pointing["camera"].ra.radian, pointing["camera"].dec.radian, label['text']) pointing["items"] = items pointing["label"] = label
[ "def", "plot_pointings", "(", "self", ",", "pointings", "=", "None", ")", ":", "if", "pointings", "is", "None", ":", "pointings", "=", "self", ".", "pointings", "i", "=", "0", "for", "pointing", "in", "pointings", ":", "items", "=", "[", "]", "i", "=...
Plot pointings on canavs
[ "Plot", "pointings", "on", "canavs" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L748-L774
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.set_pointing_label
def set_pointing_label(self): """Let the label of the current pointing to the value in the plabel box""" current = self.current pointing = self.pointings[current] self.delete_pointing(None) pointing["label"]['text'] = self.plabel.get() self.pointings.append(pointing) self.plot_pointings([pointing]) self.current = current
python
def set_pointing_label(self): """Let the label of the current pointing to the value in the plabel box""" current = self.current pointing = self.pointings[current] self.delete_pointing(None) pointing["label"]['text'] = self.plabel.get() self.pointings.append(pointing) self.plot_pointings([pointing]) self.current = current
[ "def", "set_pointing_label", "(", "self", ")", ":", "current", "=", "self", ".", "current", "pointing", "=", "self", ".", "pointings", "[", "current", "]", "self", ".", "delete_pointing", "(", "None", ")", "pointing", "[", "\"label\"", "]", "[", "'text'", ...
Let the label of the current pointing to the value in the plabel box
[ "Let", "the", "label", "of", "the", "current", "pointing", "to", "the", "value", "in", "the", "plabel", "box" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L776-L785
OSSOS/MOP
src/ossos/core/ossos/planning/obs_planner.py
Plot.move_pointing
def move_pointing(self, event): """Grab nearest pointing to event.x,event.y and with cursor""" (ra, dec) = self.c2p((self.canvasx(event.x), self.canvasy(event.y))) closest = None this_pointing = None this_index = -1 index = -1 for pointing in self.pointings: index = index + 1 # Find the camera we clicked closest too ds = pointing["camera"].separation(ra, dec) if this_pointing is None or ds < closest: this_index = index closest = ds this_pointing = pointing if this_pointing is None: return self.plabel.set(this_pointing['label']['text']) this_pointing["camera"].set_coord((ra*units.radian, dec*units.radian)) ccds = numpy.radians(this_pointing["camera"].geometry) items = this_pointing["items"] label = this_pointing["label"] (x1, y1) = self.p2c((this_pointing["camera"].ra.radian, this_pointing["camera"].dec.radian)) self.coords(label["id"], x1, y1) for i in range(len(ccds)): ccd = ccds[i] item = items[i] if len(ccd) == 4: (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) else: (x1, y1) = self.p2c((ccd[0] - ccd[2]), ccd[1] - ccd[2]) (x2, y2) = self.p2c((ccd[0] + ccd[2]), ccd[1] + ccd[2]) self.coords(item, x1, y1, x2, y2) self.current_pointing(this_index)
python
def move_pointing(self, event): """Grab nearest pointing to event.x,event.y and with cursor""" (ra, dec) = self.c2p((self.canvasx(event.x), self.canvasy(event.y))) closest = None this_pointing = None this_index = -1 index = -1 for pointing in self.pointings: index = index + 1 # Find the camera we clicked closest too ds = pointing["camera"].separation(ra, dec) if this_pointing is None or ds < closest: this_index = index closest = ds this_pointing = pointing if this_pointing is None: return self.plabel.set(this_pointing['label']['text']) this_pointing["camera"].set_coord((ra*units.radian, dec*units.radian)) ccds = numpy.radians(this_pointing["camera"].geometry) items = this_pointing["items"] label = this_pointing["label"] (x1, y1) = self.p2c((this_pointing["camera"].ra.radian, this_pointing["camera"].dec.radian)) self.coords(label["id"], x1, y1) for i in range(len(ccds)): ccd = ccds[i] item = items[i] if len(ccd) == 4: (x1, y1) = self.p2c((ccd[0], ccd[1])) (x2, y2) = self.p2c((ccd[2], ccd[3])) else: (x1, y1) = self.p2c((ccd[0] - ccd[2]), ccd[1] - ccd[2]) (x2, y2) = self.p2c((ccd[0] + ccd[2]), ccd[1] + ccd[2]) self.coords(item, x1, y1, x2, y2) self.current_pointing(this_index)
[ "def", "move_pointing", "(", "self", ",", "event", ")", ":", "(", "ra", ",", "dec", ")", "=", "self", ".", "c2p", "(", "(", "self", ".", "canvasx", "(", "event", ".", "x", ")", ",", "self", ".", "canvasy", "(", "event", ".", "y", ")", ")", ")...
Grab nearest pointing to event.x,event.y and with cursor
[ "Grab", "nearest", "pointing", "to", "event", ".", "x", "event", ".", "y", "and", "with", "cursor" ]
train
https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L787-L823