repository_name stringlengths 7 55 | func_path_in_repository stringlengths 4 223 | func_name stringlengths 1 134 | whole_func_string stringlengths 75 104k | language stringclasses 1
value | func_code_string stringlengths 75 104k | func_code_tokens listlengths 19 28.4k | func_documentation_string stringlengths 1 46.9k | func_documentation_tokens listlengths 1 1.97k | split_name stringclasses 1
value | func_code_url stringlengths 87 315 |
|---|---|---|---|---|---|---|---|---|---|---|
OSSOS/MOP | src/ossos/core/ossos/planning/obs_planner.py | Plot.ossos_pointings | def ossos_pointings(self):
"""
plot an OSSOS observation on the OSSOS plot.
"""
match = re.match('(\d+)\D(\d+)', self.expnum.get())
if match is not None:
expnum = int(match.group(1))
ccd = int(match.group(2))
x = 2112 / 2.0
y = 4644 / 2.0
else:
expnum = int(str(self.expnum.get()))
ccd = 22
x = 1000
y = 4644 - 15 / 0.185
header = None
try:
header = storage.get_astheader(expnum, ccd=ccd)
except:
if header is None:
print "Didn't get a header... "
return
ossos_wcs = wcs.WCS(header)
(ra, dec) = ossos_wcs.xy2sky(x, y)
class MyEvent(object):
def __init__(self, x, y):
self.x = x
self.y = y
(x, y) = self.p2s((math.radians(ra), math.radians(dec)))
event = MyEvent(x, y)
self.create_pointing(event, label_text=header['OBJECT'] + ' ccd{}'.format(ccd)) | python | def ossos_pointings(self):
"""
plot an OSSOS observation on the OSSOS plot.
"""
match = re.match('(\d+)\D(\d+)', self.expnum.get())
if match is not None:
expnum = int(match.group(1))
ccd = int(match.group(2))
x = 2112 / 2.0
y = 4644 / 2.0
else:
expnum = int(str(self.expnum.get()))
ccd = 22
x = 1000
y = 4644 - 15 / 0.185
header = None
try:
header = storage.get_astheader(expnum, ccd=ccd)
except:
if header is None:
print "Didn't get a header... "
return
ossos_wcs = wcs.WCS(header)
(ra, dec) = ossos_wcs.xy2sky(x, y)
class MyEvent(object):
def __init__(self, x, y):
self.x = x
self.y = y
(x, y) = self.p2s((math.radians(ra), math.radians(dec)))
event = MyEvent(x, y)
self.create_pointing(event, label_text=header['OBJECT'] + ' ccd{}'.format(ccd)) | [
"def",
"ossos_pointings",
"(",
"self",
")",
":",
"match",
"=",
"re",
".",
"match",
"(",
"'(\\d+)\\D(\\d+)'",
",",
"self",
".",
"expnum",
".",
"get",
"(",
")",
")",
"if",
"match",
"is",
"not",
"None",
":",
"expnum",
"=",
"int",
"(",
"match",
".",
"g... | plot an OSSOS observation on the OSSOS plot. | [
"plot",
"an",
"OSSOS",
"observation",
"on",
"the",
"OSSOS",
"plot",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L831-L864 |
OSSOS/MOP | src/ossos/core/ossos/planning/obs_planner.py | Plot.get_pointings | def get_pointings(self):
"""
Retrieve the MEGACAM pointings that overlap with the current FOV and plot.
@return: None
"""
self.camera.set("MEGACAM_40")
(ra1, dec1) = self.c2p((self.canvasx(1), self.canvasy(1)))
(ra2, dec2) = self.c2p((self.canvasx(480 * 2), self.canvasy(360 * 2)))
ra_cen = math.degrees((ra2 + ra1) / 2.0)
dec_cen = math.degrees((dec2 + dec1) / 2.0)
# width = math.degrees(math.fabs(ra1 - ra2))
width = 180
# height = math.degrees(math.fabs(dec2 - dec1))
height = 90
date = mpc.Time(self.date.get(), scale='utc').iso
table = cadc.cfht_megacam_tap_query(ra_cen, dec_cen, width, height, date=date)
for row in table:
ra = row['RAJ2000']
dec = row['DEJ2000']
(x, y) = self.p2s((math.radians(ra), math.radians(dec)))
event = MyEvent(x, y)
self.create_pointing(event, label_text="") | python | def get_pointings(self):
"""
Retrieve the MEGACAM pointings that overlap with the current FOV and plot.
@return: None
"""
self.camera.set("MEGACAM_40")
(ra1, dec1) = self.c2p((self.canvasx(1), self.canvasy(1)))
(ra2, dec2) = self.c2p((self.canvasx(480 * 2), self.canvasy(360 * 2)))
ra_cen = math.degrees((ra2 + ra1) / 2.0)
dec_cen = math.degrees((dec2 + dec1) / 2.0)
# width = math.degrees(math.fabs(ra1 - ra2))
width = 180
# height = math.degrees(math.fabs(dec2 - dec1))
height = 90
date = mpc.Time(self.date.get(), scale='utc').iso
table = cadc.cfht_megacam_tap_query(ra_cen, dec_cen, width, height, date=date)
for row in table:
ra = row['RAJ2000']
dec = row['DEJ2000']
(x, y) = self.p2s((math.radians(ra), math.radians(dec)))
event = MyEvent(x, y)
self.create_pointing(event, label_text="") | [
"def",
"get_pointings",
"(",
"self",
")",
":",
"self",
".",
"camera",
".",
"set",
"(",
"\"MEGACAM_40\"",
")",
"(",
"ra1",
",",
"dec1",
")",
"=",
"self",
".",
"c2p",
"(",
"(",
"self",
".",
"canvasx",
"(",
"1",
")",
",",
"self",
".",
"canvasy",
"("... | Retrieve the MEGACAM pointings that overlap with the current FOV and plot.
@return: None | [
"Retrieve",
"the",
"MEGACAM",
"pointings",
"that",
"overlap",
"with",
"the",
"current",
"FOV",
"and",
"plot",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L867-L891 |
OSSOS/MOP | src/ossos/core/ossos/planning/obs_planner.py | Plot.save_pointings | def save_pointings(self):
"""Print the currently defined FOVs"""
i = 0
if self.pointing_format.get() in ['GEMINI ET', 'CFHT ET', 'CFHT API']:
logging.info('Beginning table pointing save.')
for pointing in self.pointings:
name = pointing["label"]["text"]
camera = pointing["camera"]
ccds = numpy.radians(camera.geometry)
polygons = []
for ccd in ccds:
polygon = Polygon.Polygon(((ccd[0], ccd[1]),
(ccd[0], ccd[3]),
(ccd[2], ccd[3]),
(ccd[2], ccd[1]),
(ccd[0], ccd[1])))
polygons.append(polygon)
et = EphemTarget(name, ephem_format=self.pointing_format.get())
# determine the mean motion of target KBOs in this field.
field_kbos = []
center_ra = 0
center_dec = 0
pointing_date = mpc.Time(self.date.get(), scale='utc')
start_date = mpc.Time(self.date.get(), scale='utc') - TimeDelta(8.1*units.day)
end_date = start_date + TimeDelta(17*units.day)
time_step = TimeDelta(3.0*units.hour)
# Compute the mean position of KBOs in the field on current date.
for kbo_name, kbo in self.kbos.items():
if kbo_name in Neptune or kbo_name in tracking_termination:
print 'skipping', kbo_name
continue
kbo.predict(pointing_date)
ra = kbo.coordinate.ra
dec = kbo.coordinate.dec
if kbo_name in name:
print "{} matches pointing {} by name, adding to field.".format(kbo_name, name)
field_kbos.append(kbo)
center_ra += ra.radian
center_dec += dec.radian
else:
for polygon in polygons:
if polygon.isInside(ra.radian, dec.radian):
print "{} inside pointing {} polygon, adding to field.".format(kbo_name, name)
field_kbos.append(kbo)
center_ra += ra.radian
center_dec += dec.radian
# logging.critical("KBOs in field {0}: {1}".format(name, ', '.join([n.name for n in field_kbos])))
today = start_date
while today < end_date:
today += time_step
mean_motion = (0, 0)
max_mag = 0.0
if len(field_kbos) > 0:
current_ra = 0
current_dec = 0
for kbo in field_kbos:
kbo.predict(today)
max_mag = max(max_mag, kbo.mag)
current_ra += kbo.coordinate.ra.radian
current_dec += kbo.coordinate.dec.radian
mean_motion = ((current_ra - center_ra) / len(field_kbos),
(current_dec - center_dec) / len(field_kbos))
ra = pointing['camera'].coordinate.ra.radian + mean_motion[0]
dec = pointing['camera'].coordinate.dec.radian + mean_motion[1]
cc = SkyCoord(ra=ra,
dec=dec,
unit=(units.radian, units.radian),
obstime=today)
dt = pointing_date - today
cc.dra = (mean_motion[0] * units.radian / dt.to(units.hour)).to(units.arcsec/units.hour).value*math.cos(dec)
cc.ddec = (mean_motion[1] * units.radian / dt.to(units.hour)).to(units.arcsec/units.hour).value
cc.mag = max_mag
et.append(cc)
et.save()
return
f = tkFileDialog.asksaveasfile()
if self.pointing_format.get() == 'Subaru':
for pointing in self.pointings:
(sra, sdec) = str(pointing["camera"]).split()
ra = sra.replace(":", "")
dec = sdec.replace(":", "")
name = pointing["label"]["text"]
f.write("""{}=OBJECT="{}" RA={} DEC={} EQUINOX=2000.0 INSROT_PA=90\n""".format(name,
name,
ra,
dec))
return
if self.pointing_format.get() == 'CFHT PH':
f.write("""<?xml version = "1.0"?>
<!DOCTYPE ASTRO SYSTEM "http://vizier.u-strasbg.fr/xml/astrores.dtd">
<ASTRO ID="v0.8" xmlns:ASTRO="http://vizier.u-strasbg.fr/doc/astrores.htx">
<TABLE ID="Table">
<NAME>Fixed Targets</NAME>
<TITLE>Fixed Targets for CFHT QSO</TITLE>
<!-- Definition of each field -->
<FIELD name="NAME" datatype="A" width="20">
<DESCRIPTION>Name of target</DESCRIPTION>
</FIELD>
<FIELD name="RA" ref="" datatype="A" width="11" unit=""h:m:s"">
<DESCRIPTION>Right ascension of target</DESCRIPTION>
</FIELD>
<FIELD name="DEC" ref="" datatype="A" width="11" unit=""d:m:s"">
<DESCRIPTION>Declination of target</DESCRIPTION>
</FIELD>
<FIELD name="EPOCH" datatype="F" width="6">
<DESCRIPTION>Epoch of coordinates</DESCRIPTION>
</FIELD>
<FIELD name="POINT" datatype="A" width="5">
<DESCRIPTION>Pointing name</DESCRIPTION>
</FIELD>
<!-- Data table -->
<DATA><CSV headlines="4" colsep="|"><![CDATA[
NAME |RA |DEC |EPOCH |POINT|
|hh:mm:ss.ss|+dd:mm:ss.s| | |
12345678901234567890|12345678901|12345678901|123456|12345|
--------------------|-----------|-----------|------|-----|\n""")
if self.pointing_format.get() == 'Palomar':
f.write("index\n")
for pointing in self.pointings:
i = i + 1
name = pointing["label"]["text"]
(sra, sdec) = str(pointing["camera"]).split()
ra = sra.split(":")
dec = sdec.split(":")
dec[0] = str(int(dec[0]))
if int(dec[0]) >= 0:
dec[0] = '+' + dec[0]
if self.pointing_format.get() == 'Palomar':
f.write("%5d %16s %2s %2s %4s %3s %2s %4s 2000\n" % (i, name,
ra[0].zfill(2),
ra[1].zfill(2),
ra[2].zfill(2),
dec[0].zfill(3),
dec[1].zfill(2),
dec[2].zfill(2)))
elif self.pointing_format.get() == 'CFHT PH':
# f.write("%f %f\n" % (pointing["camera"].ra,pointing["camera"].dec))
f.write("%-20s|%11s|%11s|%6.1f|%-5d|\n" % (name, sra, sdec, 2000.0, 1))
elif self.pointing_format.get() == 'KPNO/CTIO':
str1 = sra.replace(":", " ")
str2 = sdec.replace(":", " ")
f.write("%16s %16s %16s 2000\n" % ( name, str1, str2))
elif self.pointing_format.get() == 'SSim':
ra = []
dec = []
for ccd in pointing["camera"].geometry:
ra.append(ccd[0])
ra.append(ccd[2])
dec.append(ccd[1])
dec.append(ccd[3])
dra = math.degrees(math.fabs(max(ra) - min(ra)))
ddec = math.degrees(math.fabs(max(dec) - min(dec)))
f.write("%f %f %16s %16s DATE 1.00 1.00 500 FILE\n" % (dra, ddec, sra, sdec ))
if self.pointing_format.get() == 'CFHT PH':
f.write("""]]</CSV></DATA>
</TABLE>
</ASTRO>
""")
f.close() | python | def save_pointings(self):
"""Print the currently defined FOVs"""
i = 0
if self.pointing_format.get() in ['GEMINI ET', 'CFHT ET', 'CFHT API']:
logging.info('Beginning table pointing save.')
for pointing in self.pointings:
name = pointing["label"]["text"]
camera = pointing["camera"]
ccds = numpy.radians(camera.geometry)
polygons = []
for ccd in ccds:
polygon = Polygon.Polygon(((ccd[0], ccd[1]),
(ccd[0], ccd[3]),
(ccd[2], ccd[3]),
(ccd[2], ccd[1]),
(ccd[0], ccd[1])))
polygons.append(polygon)
et = EphemTarget(name, ephem_format=self.pointing_format.get())
# determine the mean motion of target KBOs in this field.
field_kbos = []
center_ra = 0
center_dec = 0
pointing_date = mpc.Time(self.date.get(), scale='utc')
start_date = mpc.Time(self.date.get(), scale='utc') - TimeDelta(8.1*units.day)
end_date = start_date + TimeDelta(17*units.day)
time_step = TimeDelta(3.0*units.hour)
# Compute the mean position of KBOs in the field on current date.
for kbo_name, kbo in self.kbos.items():
if kbo_name in Neptune or kbo_name in tracking_termination:
print 'skipping', kbo_name
continue
kbo.predict(pointing_date)
ra = kbo.coordinate.ra
dec = kbo.coordinate.dec
if kbo_name in name:
print "{} matches pointing {} by name, adding to field.".format(kbo_name, name)
field_kbos.append(kbo)
center_ra += ra.radian
center_dec += dec.radian
else:
for polygon in polygons:
if polygon.isInside(ra.radian, dec.radian):
print "{} inside pointing {} polygon, adding to field.".format(kbo_name, name)
field_kbos.append(kbo)
center_ra += ra.radian
center_dec += dec.radian
# logging.critical("KBOs in field {0}: {1}".format(name, ', '.join([n.name for n in field_kbos])))
today = start_date
while today < end_date:
today += time_step
mean_motion = (0, 0)
max_mag = 0.0
if len(field_kbos) > 0:
current_ra = 0
current_dec = 0
for kbo in field_kbos:
kbo.predict(today)
max_mag = max(max_mag, kbo.mag)
current_ra += kbo.coordinate.ra.radian
current_dec += kbo.coordinate.dec.radian
mean_motion = ((current_ra - center_ra) / len(field_kbos),
(current_dec - center_dec) / len(field_kbos))
ra = pointing['camera'].coordinate.ra.radian + mean_motion[0]
dec = pointing['camera'].coordinate.dec.radian + mean_motion[1]
cc = SkyCoord(ra=ra,
dec=dec,
unit=(units.radian, units.radian),
obstime=today)
dt = pointing_date - today
cc.dra = (mean_motion[0] * units.radian / dt.to(units.hour)).to(units.arcsec/units.hour).value*math.cos(dec)
cc.ddec = (mean_motion[1] * units.radian / dt.to(units.hour)).to(units.arcsec/units.hour).value
cc.mag = max_mag
et.append(cc)
et.save()
return
f = tkFileDialog.asksaveasfile()
if self.pointing_format.get() == 'Subaru':
for pointing in self.pointings:
(sra, sdec) = str(pointing["camera"]).split()
ra = sra.replace(":", "")
dec = sdec.replace(":", "")
name = pointing["label"]["text"]
f.write("""{}=OBJECT="{}" RA={} DEC={} EQUINOX=2000.0 INSROT_PA=90\n""".format(name,
name,
ra,
dec))
return
if self.pointing_format.get() == 'CFHT PH':
f.write("""<?xml version = "1.0"?>
<!DOCTYPE ASTRO SYSTEM "http://vizier.u-strasbg.fr/xml/astrores.dtd">
<ASTRO ID="v0.8" xmlns:ASTRO="http://vizier.u-strasbg.fr/doc/astrores.htx">
<TABLE ID="Table">
<NAME>Fixed Targets</NAME>
<TITLE>Fixed Targets for CFHT QSO</TITLE>
<!-- Definition of each field -->
<FIELD name="NAME" datatype="A" width="20">
<DESCRIPTION>Name of target</DESCRIPTION>
</FIELD>
<FIELD name="RA" ref="" datatype="A" width="11" unit=""h:m:s"">
<DESCRIPTION>Right ascension of target</DESCRIPTION>
</FIELD>
<FIELD name="DEC" ref="" datatype="A" width="11" unit=""d:m:s"">
<DESCRIPTION>Declination of target</DESCRIPTION>
</FIELD>
<FIELD name="EPOCH" datatype="F" width="6">
<DESCRIPTION>Epoch of coordinates</DESCRIPTION>
</FIELD>
<FIELD name="POINT" datatype="A" width="5">
<DESCRIPTION>Pointing name</DESCRIPTION>
</FIELD>
<!-- Data table -->
<DATA><CSV headlines="4" colsep="|"><![CDATA[
NAME |RA |DEC |EPOCH |POINT|
|hh:mm:ss.ss|+dd:mm:ss.s| | |
12345678901234567890|12345678901|12345678901|123456|12345|
--------------------|-----------|-----------|------|-----|\n""")
if self.pointing_format.get() == 'Palomar':
f.write("index\n")
for pointing in self.pointings:
i = i + 1
name = pointing["label"]["text"]
(sra, sdec) = str(pointing["camera"]).split()
ra = sra.split(":")
dec = sdec.split(":")
dec[0] = str(int(dec[0]))
if int(dec[0]) >= 0:
dec[0] = '+' + dec[0]
if self.pointing_format.get() == 'Palomar':
f.write("%5d %16s %2s %2s %4s %3s %2s %4s 2000\n" % (i, name,
ra[0].zfill(2),
ra[1].zfill(2),
ra[2].zfill(2),
dec[0].zfill(3),
dec[1].zfill(2),
dec[2].zfill(2)))
elif self.pointing_format.get() == 'CFHT PH':
# f.write("%f %f\n" % (pointing["camera"].ra,pointing["camera"].dec))
f.write("%-20s|%11s|%11s|%6.1f|%-5d|\n" % (name, sra, sdec, 2000.0, 1))
elif self.pointing_format.get() == 'KPNO/CTIO':
str1 = sra.replace(":", " ")
str2 = sdec.replace(":", " ")
f.write("%16s %16s %16s 2000\n" % ( name, str1, str2))
elif self.pointing_format.get() == 'SSim':
ra = []
dec = []
for ccd in pointing["camera"].geometry:
ra.append(ccd[0])
ra.append(ccd[2])
dec.append(ccd[1])
dec.append(ccd[3])
dra = math.degrees(math.fabs(max(ra) - min(ra)))
ddec = math.degrees(math.fabs(max(dec) - min(dec)))
f.write("%f %f %16s %16s DATE 1.00 1.00 500 FILE\n" % (dra, ddec, sra, sdec ))
if self.pointing_format.get() == 'CFHT PH':
f.write("""]]</CSV></DATA>
</TABLE>
</ASTRO>
""")
f.close() | [
"def",
"save_pointings",
"(",
"self",
")",
":",
"i",
"=",
"0",
"if",
"self",
".",
"pointing_format",
".",
"get",
"(",
")",
"in",
"[",
"'GEMINI ET'",
",",
"'CFHT ET'",
",",
"'CFHT API'",
"]",
":",
"logging",
".",
"info",
"(",
"'Beginning table pointing save... | Print the currently defined FOVs | [
"Print",
"the",
"currently",
"defined",
"FOVs"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L893-L1058 |
OSSOS/MOP | src/ossos/core/ossos/planning/obs_planner.py | Plot.doplot | def doplot(self):
"""
Clear the plot and then redraw it.
"""
w = self
w.delete(ALL)
w.coord_grid()
w.objList.delete(0, END)
self._plot() | python | def doplot(self):
"""
Clear the plot and then redraw it.
"""
w = self
w.delete(ALL)
w.coord_grid()
w.objList.delete(0, END)
self._plot() | [
"def",
"doplot",
"(",
"self",
")",
":",
"w",
"=",
"self",
"w",
".",
"delete",
"(",
"ALL",
")",
"w",
".",
"coord_grid",
"(",
")",
"w",
".",
"objList",
".",
"delete",
"(",
"0",
",",
"END",
")",
"self",
".",
"_plot",
"(",
")"
] | Clear the plot and then redraw it. | [
"Clear",
"the",
"plot",
"and",
"then",
"redraw",
"it",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L1060-L1069 |
OSSOS/MOP | src/ossos/core/ossos/planning/obs_planner.py | Plot._plot | def _plot(self):
"""Draw the actual plot.
"""
w = self
kbos = self.kbos
re_string = w.FilterVar.get()
vlist = []
for name in kbos:
if not re.search(re_string, name):
continue
vlist.append(name)
fill = None
is_colossos_target = False
for cname in parameters.COLOSSOS:
if cname in name:
is_colossos_target = True
print "ColOSSOS: ", cname
break
is_terminated = False
for cname in tracking_termination:
if cname in name:
is_terminated = True
print "Terminated", cname
break
is_double = False
for cname in doubles:
if cname in name:
is_double = True
print 'Needs double:', cname
break
if type(kbos[name]) == type(ephem.EllipticalBody()):
try:
kbos[name].compute(w.date.get())
except Exception as e:
logging.error("Failed to compute KBO position. {}".format(name))
continue
ra = kbos[name].ra
dec = kbos[name].dec
a = math.radians(10.0 / 3600.0)
b = a
ang = 0.0
point_size = 1
yoffset = +10
xoffset = +10
elif isinstance(kbos[name], orbfit.Orbfit):
yoffset = -10
xoffset = -10
kbo = kbos[name]
pointing_date = mpc.Time(w.date.get(), scale='utc').jd
trail_mid_point = 0
for days in range(trail_mid_point * 2 + 1):
point_size = days == trail_mid_point and 5 or 1
today = mpc.Time(pointing_date - trail_mid_point + days, scale='utc', format='jd')
kbo.predict(today, 568)
ra = kbo.coordinate.ra.radian
dec = kbo.coordinate.dec.radian
from astropy import units
a = kbo.dra.to(units.radian).value
b = kbo.ddec.to(units.radian).value
ang = kbo.pa.to(units.radian).value
lost = False
if a > math.radians(0.3):
lost = True
fill_colour = (lost and "red") or "grey"
fill_colour = (is_double and "magenta") or fill_colour
fill_colour = (is_colossos_target and "blue") or fill_colour
point_colour = (is_terminated and "red") or "black"
w.create_point(ra, dec, size=point_size, color=point_colour, fill=fill_colour)
if w.show_ellipse.get() == 1 and days == trail_mid_point:
if a < math.radians(5.0):
w.create_ellipse(ra,
dec,
a,
b,
ang)
if w.show_labels.get() == 1:
w.label(ra, dec, name, offset=[xoffset, yoffset])
else:
ra = kbos[name]['RA']
dec = kbos[name]['DEC']
w.create_point(ra, dec, size=4, color='cyan')
w.label(ra, dec, name[-2:], offset=[-15, +15]) # truncate object name for plot clutter clarity
vlist.sort()
for v in vlist:
w.objList.insert(END, v)
w.plot_pointings() | python | def _plot(self):
"""Draw the actual plot.
"""
w = self
kbos = self.kbos
re_string = w.FilterVar.get()
vlist = []
for name in kbos:
if not re.search(re_string, name):
continue
vlist.append(name)
fill = None
is_colossos_target = False
for cname in parameters.COLOSSOS:
if cname in name:
is_colossos_target = True
print "ColOSSOS: ", cname
break
is_terminated = False
for cname in tracking_termination:
if cname in name:
is_terminated = True
print "Terminated", cname
break
is_double = False
for cname in doubles:
if cname in name:
is_double = True
print 'Needs double:', cname
break
if type(kbos[name]) == type(ephem.EllipticalBody()):
try:
kbos[name].compute(w.date.get())
except Exception as e:
logging.error("Failed to compute KBO position. {}".format(name))
continue
ra = kbos[name].ra
dec = kbos[name].dec
a = math.radians(10.0 / 3600.0)
b = a
ang = 0.0
point_size = 1
yoffset = +10
xoffset = +10
elif isinstance(kbos[name], orbfit.Orbfit):
yoffset = -10
xoffset = -10
kbo = kbos[name]
pointing_date = mpc.Time(w.date.get(), scale='utc').jd
trail_mid_point = 0
for days in range(trail_mid_point * 2 + 1):
point_size = days == trail_mid_point and 5 or 1
today = mpc.Time(pointing_date - trail_mid_point + days, scale='utc', format='jd')
kbo.predict(today, 568)
ra = kbo.coordinate.ra.radian
dec = kbo.coordinate.dec.radian
from astropy import units
a = kbo.dra.to(units.radian).value
b = kbo.ddec.to(units.radian).value
ang = kbo.pa.to(units.radian).value
lost = False
if a > math.radians(0.3):
lost = True
fill_colour = (lost and "red") or "grey"
fill_colour = (is_double and "magenta") or fill_colour
fill_colour = (is_colossos_target and "blue") or fill_colour
point_colour = (is_terminated and "red") or "black"
w.create_point(ra, dec, size=point_size, color=point_colour, fill=fill_colour)
if w.show_ellipse.get() == 1 and days == trail_mid_point:
if a < math.radians(5.0):
w.create_ellipse(ra,
dec,
a,
b,
ang)
if w.show_labels.get() == 1:
w.label(ra, dec, name, offset=[xoffset, yoffset])
else:
ra = kbos[name]['RA']
dec = kbos[name]['DEC']
w.create_point(ra, dec, size=4, color='cyan')
w.label(ra, dec, name[-2:], offset=[-15, +15]) # truncate object name for plot clutter clarity
vlist.sort()
for v in vlist:
w.objList.insert(END, v)
w.plot_pointings() | [
"def",
"_plot",
"(",
"self",
")",
":",
"w",
"=",
"self",
"kbos",
"=",
"self",
".",
"kbos",
"re_string",
"=",
"w",
".",
"FilterVar",
".",
"get",
"(",
")",
"vlist",
"=",
"[",
"]",
"for",
"name",
"in",
"kbos",
":",
"if",
"not",
"re",
".",
"search"... | Draw the actual plot. | [
"Draw",
"the",
"actual",
"plot",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/obs_planner.py#L1071-L1159 |
OSSOS/MOP | src/ossos/utils/match.py | planted | def planted(fk_candidate_observations, planted_objects, tolerance=10):
"""
Using the fk_candidate_observations as input get the Object.planted file from VOSpace and match
planted sources with found sources.
The Object.planted list is pulled from VOSpace based on the standard file-layout and name of the
first exposure as read from the .astrom file.
:param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted
:param planted_objects: object containing the planted object information.
"""
found_pos = []
detections = fk_candidate_observations.get_sources()
for detection in detections:
reading = detection.get_reading(0)
# create a list of positions, to be used later by match_lists
found_pos.append([reading.x, reading.y])
# The match_list method expects a list that contains a position, not an x and a y vector, so we transpose.
planted_objects_table = planted_objects.table
planted_pos = numpy.transpose([planted_objects_table['x'].data, planted_objects_table['y'].data])
# match_idx is an order list. The list is in the order of the first list of positions and each entry
# is the index of the matching position from the second list.
(match_idx, match_fnd) = util.match_lists(numpy.array(planted_pos), numpy.array(found_pos), tolerance=tolerance)
return match_fnd, match_idx | python | def planted(fk_candidate_observations, planted_objects, tolerance=10):
"""
Using the fk_candidate_observations as input get the Object.planted file from VOSpace and match
planted sources with found sources.
The Object.planted list is pulled from VOSpace based on the standard file-layout and name of the
first exposure as read from the .astrom file.
:param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted
:param planted_objects: object containing the planted object information.
"""
found_pos = []
detections = fk_candidate_observations.get_sources()
for detection in detections:
reading = detection.get_reading(0)
# create a list of positions, to be used later by match_lists
found_pos.append([reading.x, reading.y])
# The match_list method expects a list that contains a position, not an x and a y vector, so we transpose.
planted_objects_table = planted_objects.table
planted_pos = numpy.transpose([planted_objects_table['x'].data, planted_objects_table['y'].data])
# match_idx is an order list. The list is in the order of the first list of positions and each entry
# is the index of the matching position from the second list.
(match_idx, match_fnd) = util.match_lists(numpy.array(planted_pos), numpy.array(found_pos), tolerance=tolerance)
return match_fnd, match_idx | [
"def",
"planted",
"(",
"fk_candidate_observations",
",",
"planted_objects",
",",
"tolerance",
"=",
"10",
")",
":",
"found_pos",
"=",
"[",
"]",
"detections",
"=",
"fk_candidate_observations",
".",
"get_sources",
"(",
")",
"for",
"detection",
"in",
"detections",
"... | Using the fk_candidate_observations as input get the Object.planted file from VOSpace and match
planted sources with found sources.
The Object.planted list is pulled from VOSpace based on the standard file-layout and name of the
first exposure as read from the .astrom file.
:param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted
:param planted_objects: object containing the planted object information. | [
"Using",
"the",
"fk_candidate_observations",
"as",
"input",
"get",
"the",
"Object",
".",
"planted",
"file",
"from",
"VOSpace",
"and",
"match",
"planted",
"sources",
"with",
"found",
"sources",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/match.py#L36-L63 |
OSSOS/MOP | src/jjk/preproc/MOPcoord.py | mjd2gmst | def mjd2gmst(mjd):
"""Convert Modfied Juian Date (JD = 2400000.5) to GMST
Take from P.T. Walace routines"""
D2PI= 6.2831853071795864769252867665590057683943387987502
DS2R= 7.2722052166430399038487115353692196393452995355905e-5
tu = (mjd-51544.5)/36525.0;
st = fmod(mjd,1.0) * D2PI + ( 24110.54841 + ( 8640184.812866 + ( 0.093104 - 6.2e-6 * tu ) * tu ) * tu ) * DS2R
w = fmod(st,D2PI)
if w >= 0.0 :
return w
else:
return w+D2PI | python | def mjd2gmst(mjd):
"""Convert Modfied Juian Date (JD = 2400000.5) to GMST
Take from P.T. Walace routines"""
D2PI= 6.2831853071795864769252867665590057683943387987502
DS2R= 7.2722052166430399038487115353692196393452995355905e-5
tu = (mjd-51544.5)/36525.0;
st = fmod(mjd,1.0) * D2PI + ( 24110.54841 + ( 8640184.812866 + ( 0.093104 - 6.2e-6 * tu ) * tu ) * tu ) * DS2R
w = fmod(st,D2PI)
if w >= 0.0 :
return w
else:
return w+D2PI | [
"def",
"mjd2gmst",
"(",
"mjd",
")",
":",
"D2PI",
"=",
"6.2831853071795864769252867665590057683943387987502",
"DS2R",
"=",
"7.2722052166430399038487115353692196393452995355905e-5",
"tu",
"=",
"(",
"mjd",
"-",
"51544.5",
")",
"/",
"36525.0",
"st",
"=",
"fmod",
"(",
"m... | Convert Modfied Juian Date (JD = 2400000.5) to GMST
Take from P.T. Walace routines | [
"Convert",
"Modfied",
"Juian",
"Date",
"(",
"JD",
"=",
"2400000",
".",
"5",
")",
"to",
"GMST",
"Take",
"from",
"P",
".",
"T",
".",
"Walace",
"routines"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPcoord.py#L3-L19 |
OSSOS/MOP | src/jjk/preproc/MOPcoord.py | coord.ec2eq | def ec2eq(self):
"""Convert ecliptic coordinates to equatorial coordinates"""
import math
#from numpy.matlib import sin, cos, arcsin, arctan2
from math import sin, cos
from math import asin as arcsin
from math import atan2 as arctan2
from math import acos as arccos
eb=self.eb
el=self.el
ob=math.radians(23.439281)
dec = arcsin(sin(eb)*cos(ob)+cos(eb)*sin(ob)*sin(el))
sra = (sin(dec)*cos(ob)-sin(eb))/(cos(dec)*sin(ob))
cra = cos(el)*cos(eb)/cos(dec)
if sra < 1 and sra > -1 :
sa= arcsin(sra)
else:
sa = 0
ca= arccos(cra)
tsa=sa
tca=ca
if tsa<0 :
ca=2.0*math.pi-ca
if tca>=math.pi/2.0:
sa=math.pi-sa
if ca >= math.pi*2.0:
ca=ca-math.pi*2.0
self.tsa=sra
self.tca=cra
self.ra=ca
self.dec=dec | python | def ec2eq(self):
"""Convert ecliptic coordinates to equatorial coordinates"""
import math
#from numpy.matlib import sin, cos, arcsin, arctan2
from math import sin, cos
from math import asin as arcsin
from math import atan2 as arctan2
from math import acos as arccos
eb=self.eb
el=self.el
ob=math.radians(23.439281)
dec = arcsin(sin(eb)*cos(ob)+cos(eb)*sin(ob)*sin(el))
sra = (sin(dec)*cos(ob)-sin(eb))/(cos(dec)*sin(ob))
cra = cos(el)*cos(eb)/cos(dec)
if sra < 1 and sra > -1 :
sa= arcsin(sra)
else:
sa = 0
ca= arccos(cra)
tsa=sa
tca=ca
if tsa<0 :
ca=2.0*math.pi-ca
if tca>=math.pi/2.0:
sa=math.pi-sa
if ca >= math.pi*2.0:
ca=ca-math.pi*2.0
self.tsa=sra
self.tca=cra
self.ra=ca
self.dec=dec | [
"def",
"ec2eq",
"(",
"self",
")",
":",
"import",
"math",
"#from numpy.matlib import sin, cos, arcsin, arctan2",
"from",
"math",
"import",
"sin",
",",
"cos",
"from",
"math",
"import",
"asin",
"as",
"arcsin",
"from",
"math",
"import",
"atan2",
"as",
"arctan2",
"fr... | Convert ecliptic coordinates to equatorial coordinates | [
"Convert",
"ecliptic",
"coordinates",
"to",
"equatorial",
"coordinates"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPcoord.py#L100-L134 |
OSSOS/MOP | src/ossos/core/ossos/planning/layout36.py | plot_line | def plot_line(axes, fname, ltype):
"""plot the ecliptic plane line on the given axes."""
x = np.genfromtxt(fname, unpack=True)
axes.plot(x[0], x[1], ltype) | python | def plot_line(axes, fname, ltype):
"""plot the ecliptic plane line on the given axes."""
x = np.genfromtxt(fname, unpack=True)
axes.plot(x[0], x[1], ltype) | [
"def",
"plot_line",
"(",
"axes",
",",
"fname",
",",
"ltype",
")",
":",
"x",
"=",
"np",
".",
"genfromtxt",
"(",
"fname",
",",
"unpack",
"=",
"True",
")",
"axes",
".",
"plot",
"(",
"x",
"[",
"0",
"]",
",",
"x",
"[",
"1",
"]",
",",
"ltype",
")"
... | plot the ecliptic plane line on the given axes. | [
"plot",
"the",
"ecliptic",
"plane",
"line",
"on",
"the",
"given",
"axes",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/layout36.py#L28-L31 |
OSSOS/MOP | src/ossos/core/ossos/parameters.py | apmag_at_absmag | def apmag_at_absmag(H, d, phi=1):
"""
Calculate the apparent magnitude of a TNO given its absolute magnitude H, for a given distance.
:param H: TNO absolute magnitude (unitless)
:param d: barycentric distance (AU)
:param phi: phase angle (0-1, always v close to 1 for TNOs)
:return: apparent magnitude of TNO
"""
d_observer = 1. # 1 AU
# approximate object's distance d_heliocentric and d_geocentric as the same, d, because TNO
m_r = H + 2.5 * math.log10((d ** 4) / (phi * d_observer ** 4))
print("m_r = {:2.2f} for a H = {} TNO at {} AU at opposition.".format(
m_r, H, d))
return m_r | python | def apmag_at_absmag(H, d, phi=1):
"""
Calculate the apparent magnitude of a TNO given its absolute magnitude H, for a given distance.
:param H: TNO absolute magnitude (unitless)
:param d: barycentric distance (AU)
:param phi: phase angle (0-1, always v close to 1 for TNOs)
:return: apparent magnitude of TNO
"""
d_observer = 1. # 1 AU
# approximate object's distance d_heliocentric and d_geocentric as the same, d, because TNO
m_r = H + 2.5 * math.log10((d ** 4) / (phi * d_observer ** 4))
print("m_r = {:2.2f} for a H = {} TNO at {} AU at opposition.".format(
m_r, H, d))
return m_r | [
"def",
"apmag_at_absmag",
"(",
"H",
",",
"d",
",",
"phi",
"=",
"1",
")",
":",
"d_observer",
"=",
"1.",
"# 1 AU",
"# approximate object's distance d_heliocentric and d_geocentric as the same, d, because TNO",
"m_r",
"=",
"H",
"+",
"2.5",
"*",
"math",
".",
"log10",
... | Calculate the apparent magnitude of a TNO given its absolute magnitude H, for a given distance.
:param H: TNO absolute magnitude (unitless)
:param d: barycentric distance (AU)
:param phi: phase angle (0-1, always v close to 1 for TNOs)
:return: apparent magnitude of TNO | [
"Calculate",
"the",
"apparent",
"magnitude",
"of",
"a",
"TNO",
"given",
"its",
"absolute",
"magnitude",
"H",
"for",
"a",
"given",
"distance",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/parameters.py#L357-L371 |
OSSOS/MOP | src/ossos/core/ossos/parameters.py | tno.from_summary_line | def from_summary_line(cls, summaryLine, version=4, existing_object=None):
''' Summary format:
object mag stdev dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E M ..E ra_dis dec_dis
'''
if not summaryLine:
raise ValueError('No summary line given')
if version == 4:
params = summaryLine.split()
if len(params) != 25:
print params
raise TypeError('Expected 25 columns, {0} given'.format(len(params)))
input_params = params[0:1] + params[3:23]
if not existing_object:
retval = cls(*input_params)
else:
assert isinstance(existing_object, tno)
assert existing_object.name == params[0]
retval = existing_object
retval.mean_mag = float(params[1])
retval.mean_mag_stdev = float(params[2])
retval.ra_discov = float(params[23])
retval.dec_discov = float(params[24])
else:
raise VersionError('Unknown version "{0}"'.format(version))
assert retval
return retval | python | def from_summary_line(cls, summaryLine, version=4, existing_object=None):
''' Summary format:
object mag stdev dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E M ..E ra_dis dec_dis
'''
if not summaryLine:
raise ValueError('No summary line given')
if version == 4:
params = summaryLine.split()
if len(params) != 25:
print params
raise TypeError('Expected 25 columns, {0} given'.format(len(params)))
input_params = params[0:1] + params[3:23]
if not existing_object:
retval = cls(*input_params)
else:
assert isinstance(existing_object, tno)
assert existing_object.name == params[0]
retval = existing_object
retval.mean_mag = float(params[1])
retval.mean_mag_stdev = float(params[2])
retval.ra_discov = float(params[23])
retval.dec_discov = float(params[24])
else:
raise VersionError('Unknown version "{0}"'.format(version))
assert retval
return retval | [
"def",
"from_summary_line",
"(",
"cls",
",",
"summaryLine",
",",
"version",
"=",
"4",
",",
"existing_object",
"=",
"None",
")",
":",
"if",
"not",
"summaryLine",
":",
"raise",
"ValueError",
"(",
"'No summary line given'",
")",
"if",
"version",
"==",
"4",
":",... | Summary format:
object mag stdev dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E M ..E ra_dis dec_dis | [
"Summary",
"format",
":",
"object",
"mag",
"stdev",
"dist",
"..",
"E",
"nobs",
"time",
"av_xres",
"av_yres",
"max_x",
"max_y",
"a",
"..",
"E",
"e",
"..",
"E",
"i",
"..",
"E",
"node",
"..",
"E",
"argperi",
"..",
"E",
"M",
"..",
"E",
"ra_dis",
"dec_d... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/parameters.py#L270-L296 |
OSSOS/MOP | src/ossos/core/ossos/parameters.py | tno.from_class_line | def from_class_line(cls, classLine, version=4, existing_object=None):
'''
Class format:
class wrt n m security object mag stdev F H_sur dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E time_peri ..E rate
'''
if not classLine:
raise ValueError('No class file line given.')
if version == 4:
params = classLine.split()
if len(params) != 31:
print params
raise TypeError('Expected 31 columns, {0} given'.format(len(params)))
input_params = [params[5]] + params[10:30] # the elements that are in common
if not existing_object:
# can later add more tests that the values match those in the existing object
retval = cls(*input_params)
else:
assert isinstance(existing_object, tno)
assert existing_object.name == params[5]
retval = existing_object
retval.classification = params[0]
retval.wrt = params[1]
retval.n = int(params[2])
retval.m = int(params[3])
retval.security = params[4]
retval.mag_discov = float(params[6])
retval.mag_discov_e = float(params[7])
retval.filter = params[8]
retval.H = float(
params[9]) # H_r magnitude, using the average m_r and the discovery geometry (same error as m_r)
retval.rate = float(params[30])
elif version == 5:
# Upgrade?
# input_params =
retval = cls(*input_params)
else:
raise VersionError('Unknown version "{0}"'.format(version))
assert retval
return retval | python | def from_class_line(cls, classLine, version=4, existing_object=None):
'''
Class format:
class wrt n m security object mag stdev F H_sur dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E time_peri ..E rate
'''
if not classLine:
raise ValueError('No class file line given.')
if version == 4:
params = classLine.split()
if len(params) != 31:
print params
raise TypeError('Expected 31 columns, {0} given'.format(len(params)))
input_params = [params[5]] + params[10:30] # the elements that are in common
if not existing_object:
# can later add more tests that the values match those in the existing object
retval = cls(*input_params)
else:
assert isinstance(existing_object, tno)
assert existing_object.name == params[5]
retval = existing_object
retval.classification = params[0]
retval.wrt = params[1]
retval.n = int(params[2])
retval.m = int(params[3])
retval.security = params[4]
retval.mag_discov = float(params[6])
retval.mag_discov_e = float(params[7])
retval.filter = params[8]
retval.H = float(
params[9]) # H_r magnitude, using the average m_r and the discovery geometry (same error as m_r)
retval.rate = float(params[30])
elif version == 5:
# Upgrade?
# input_params =
retval = cls(*input_params)
else:
raise VersionError('Unknown version "{0}"'.format(version))
assert retval
return retval | [
"def",
"from_class_line",
"(",
"cls",
",",
"classLine",
",",
"version",
"=",
"4",
",",
"existing_object",
"=",
"None",
")",
":",
"if",
"not",
"classLine",
":",
"raise",
"ValueError",
"(",
"'No class file line given.'",
")",
"if",
"version",
"==",
"4",
":",
... | Class format:
class wrt n m security object mag stdev F H_sur dist ..E nobs time av_xres av_yres max_x max_y
a ..E e ..E i ..E node ..E argperi ..E time_peri ..E rate | [
"Class",
"format",
":",
"class",
"wrt",
"n",
"m",
"security",
"object",
"mag",
"stdev",
"F",
"H_sur",
"dist",
"..",
"E",
"nobs",
"time",
"av_xres",
"av_yres",
"max_x",
"max_y",
"a",
"..",
"E",
"e",
"..",
"E",
"i",
"..",
"E",
"node",
"..",
"E",
"arg... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/parameters.py#L299-L338 |
openstack/python-scciclient | scciclient/irmc/scci.py | scci_cmd | def scci_cmd(host, userid, password, cmd, port=443, auth_method='basic',
client_timeout=60, do_async=True, **kwargs):
"""execute SCCI command
This function calls SCCI server modules
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param cmd: SCCI command
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:param do_async: async call if True, sync call otherwise
:returns: requests.Response from SCCI server
:raises: SCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
header = {'Content-type': 'application/x-www-form-urlencoded'}
if kwargs.get('upgrade_type') == 'irmc':
with open(cmd, 'rb') as file:
data = file.read()
config_type = '/irmcupdate?flashSelect=255'
elif kwargs.get('upgrade_type') == 'bios':
with open(cmd, 'rb') as file:
data = file.read()
config_type = '/biosupdate'
else:
data = cmd
config_type = '/config'
r = requests.post(protocol + '://' + host + config_type,
data=data,
headers=header,
verify=False,
timeout=client_timeout,
allow_redirects=False,
auth=auth_obj)
if not do_async:
time.sleep(5)
if DEBUG:
print(cmd)
print(r.text)
print("do_async = %s" % do_async)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
result_xml = ET.fromstring(r.text)
status = result_xml.find("./Value")
# severity = result_xml.find("./Severity")
error = result_xml.find("./Error")
message = result_xml.find("./Message")
if not int(status.text) == 0:
raise SCCIClientError(
('SCCI PROTOCOL ERROR, STATUS CODE = %s, '
'ERROR = %s, MESSAGE = %s' %
(str(status.text), error.text, message.text)))
else:
return r
except IOError as input_error:
raise SCCIClientError(input_error)
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error)
except requests.exceptions.RequestException as requests_exception:
raise SCCIClientError(requests_exception) | python | def scci_cmd(host, userid, password, cmd, port=443, auth_method='basic',
client_timeout=60, do_async=True, **kwargs):
"""execute SCCI command
This function calls SCCI server modules
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param cmd: SCCI command
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:param do_async: async call if True, sync call otherwise
:returns: requests.Response from SCCI server
:raises: SCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
header = {'Content-type': 'application/x-www-form-urlencoded'}
if kwargs.get('upgrade_type') == 'irmc':
with open(cmd, 'rb') as file:
data = file.read()
config_type = '/irmcupdate?flashSelect=255'
elif kwargs.get('upgrade_type') == 'bios':
with open(cmd, 'rb') as file:
data = file.read()
config_type = '/biosupdate'
else:
data = cmd
config_type = '/config'
r = requests.post(protocol + '://' + host + config_type,
data=data,
headers=header,
verify=False,
timeout=client_timeout,
allow_redirects=False,
auth=auth_obj)
if not do_async:
time.sleep(5)
if DEBUG:
print(cmd)
print(r.text)
print("do_async = %s" % do_async)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
result_xml = ET.fromstring(r.text)
status = result_xml.find("./Value")
# severity = result_xml.find("./Severity")
error = result_xml.find("./Error")
message = result_xml.find("./Message")
if not int(status.text) == 0:
raise SCCIClientError(
('SCCI PROTOCOL ERROR, STATUS CODE = %s, '
'ERROR = %s, MESSAGE = %s' %
(str(status.text), error.text, message.text)))
else:
return r
except IOError as input_error:
raise SCCIClientError(input_error)
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error)
except requests.exceptions.RequestException as requests_exception:
raise SCCIClientError(requests_exception) | [
"def",
"scci_cmd",
"(",
"host",
",",
"userid",
",",
"password",
",",
"cmd",
",",
"port",
"=",
"443",
",",
"auth_method",
"=",
"'basic'",
",",
"client_timeout",
"=",
"60",
",",
"do_async",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"auth_obj",
"=... | execute SCCI command
This function calls SCCI server modules
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param cmd: SCCI command
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:param do_async: async call if True, sync call otherwise
:returns: requests.Response from SCCI server
:raises: SCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed | [
"execute",
"SCCI",
"command"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L249-L333 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_client | def get_client(host, userid, password, port=443, auth_method='basic',
client_timeout=60, **kwargs):
"""get SCCI command partial function
This function returns SCCI command partial function
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: scci_cmd partial function which takes a SCCI command param
"""
return functools.partial(scci_cmd, host, userid, password,
port=port, auth_method=auth_method,
client_timeout=client_timeout, **kwargs) | python | def get_client(host, userid, password, port=443, auth_method='basic',
client_timeout=60, **kwargs):
"""get SCCI command partial function
This function returns SCCI command partial function
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: scci_cmd partial function which takes a SCCI command param
"""
return functools.partial(scci_cmd, host, userid, password,
port=port, auth_method=auth_method,
client_timeout=client_timeout, **kwargs) | [
"def",
"get_client",
"(",
"host",
",",
"userid",
",",
"password",
",",
"port",
"=",
"443",
",",
"auth_method",
"=",
"'basic'",
",",
"client_timeout",
"=",
"60",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"functools",
".",
"partial",
"(",
"scci_cmd",
... | get SCCI command partial function
This function returns SCCI command partial function
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: scci_cmd partial function which takes a SCCI command param | [
"get",
"SCCI",
"command",
"partial",
"function"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L336-L352 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_virtual_cd_set_params_cmd | def get_virtual_cd_set_params_cmd(remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_deploy_iso,
remote_image_username,
remote_image_user_password):
"""get Virtual CD Media Set Parameters Command
This function returns Virtual CD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command
"""
cmd = _VIRTUAL_MEDIA_CD_SETTINGS % (
remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_deploy_iso,
remote_image_username,
remote_image_user_password)
return cmd | python | def get_virtual_cd_set_params_cmd(remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_deploy_iso,
remote_image_username,
remote_image_user_password):
"""get Virtual CD Media Set Parameters Command
This function returns Virtual CD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command
"""
cmd = _VIRTUAL_MEDIA_CD_SETTINGS % (
remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_deploy_iso,
remote_image_username,
remote_image_user_password)
return cmd | [
"def",
"get_virtual_cd_set_params_cmd",
"(",
"remote_image_server",
",",
"remote_image_user_domain",
",",
"remote_image_share_type",
",",
"remote_image_share_name",
",",
"remote_image_deploy_iso",
",",
"remote_image_username",
",",
"remote_image_user_password",
")",
":",
"cmd",
... | get Virtual CD Media Set Parameters Command
This function returns Virtual CD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command | [
"get",
"Virtual",
"CD",
"Media",
"Set",
"Parameters",
"Command"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L355-L384 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_virtual_fd_set_params_cmd | def get_virtual_fd_set_params_cmd(remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_floppy_fat,
remote_image_username,
remote_image_user_password):
"""get Virtual FD Media Set Parameters Command
This function returns Virtual FD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command
"""
cmd = _VIRTUAL_MEDIA_FD_SETTINGS % (
remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_floppy_fat,
remote_image_username,
remote_image_user_password)
return cmd | python | def get_virtual_fd_set_params_cmd(remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_floppy_fat,
remote_image_username,
remote_image_user_password):
"""get Virtual FD Media Set Parameters Command
This function returns Virtual FD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command
"""
cmd = _VIRTUAL_MEDIA_FD_SETTINGS % (
remote_image_server,
remote_image_user_domain,
remote_image_share_type,
remote_image_share_name,
remote_image_floppy_fat,
remote_image_username,
remote_image_user_password)
return cmd | [
"def",
"get_virtual_fd_set_params_cmd",
"(",
"remote_image_server",
",",
"remote_image_user_domain",
",",
"remote_image_share_type",
",",
"remote_image_share_name",
",",
"remote_image_floppy_fat",
",",
"remote_image_username",
",",
"remote_image_user_password",
")",
":",
"cmd",
... | get Virtual FD Media Set Parameters Command
This function returns Virtual FD Media Set Parameters Command
:param remote_image_server: remote image server name or IP
:param remote_image_user_domain: domain name of remote image server
:param remote_image_share_type: share type of ShareType
:param remote_image_share_name: share name
:param remote_image_deploy_iso: deploy ISO image file name
:param remote_image_username: username of remote image server
:param remote_image_user_password: password of the username
:returns: SCCI command | [
"get",
"Virtual",
"FD",
"Media",
"Set",
"Parameters",
"Command"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L387-L415 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_report | def get_report(host, userid, password,
port=443, auth_method='basic', client_timeout=60):
"""get iRMC report
This function returns iRMC report in XML format
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: root element of SCCI report
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
r = requests.get(protocol + '://' + host + '/report.xml',
verify=False,
timeout=(10, client_timeout),
allow_redirects=False,
auth=auth_obj)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
root = ET.fromstring(r.text)
return root
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error)
except requests.exceptions.RequestException as requests_exception:
raise SCCIClientError(requests_exception) | python | def get_report(host, userid, password,
port=443, auth_method='basic', client_timeout=60):
"""get iRMC report
This function returns iRMC report in XML format
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: root element of SCCI report
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
r = requests.get(protocol + '://' + host + '/report.xml',
verify=False,
timeout=(10, client_timeout),
allow_redirects=False,
auth=auth_obj)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
root = ET.fromstring(r.text)
return root
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error)
except requests.exceptions.RequestException as requests_exception:
raise SCCIClientError(requests_exception) | [
"def",
"get_report",
"(",
"host",
",",
"userid",
",",
"password",
",",
"port",
"=",
"443",
",",
"auth_method",
"=",
"'basic'",
",",
"client_timeout",
"=",
"60",
")",
":",
"auth_obj",
"=",
"None",
"try",
":",
"protocol",
"=",
"{",
"80",
":",
"'http'",
... | get iRMC report
This function returns iRMC report in XML format
:param host: hostname or IP of iRMC
:param userid: userid for iRMC with administrator privileges
:param password: password for userid
:param port: port number of iRMC
:param auth_method: irmc_username
:param client_timeout: timeout for SCCI operations
:returns: root element of SCCI report
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed | [
"get",
"iRMC",
"report"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L418-L468 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_essential_properties | def get_essential_properties(report, prop_keys):
"""get essential properties
This function returns a dictionary which contains keys as in
prop_keys and its values from the report.
:param report: SCCI report element
:param prop_keys: a list of keys for essential properties
:returns: a dictionary which contains keys as in
prop_keys and its values.
"""
v = {}
v['memory_mb'] = int(report.find('./System/Memory/Installed').text)
v['local_gb'] = sum(
[int(int(size.text) / 1024)
for size in report.findall('.//PhysicalDrive/ConfigurableSize')])
v['cpus'] = sum([int(cpu.find('./CoreNumber').text)
for cpu in report.find('./System/Processor')
if cpu.find('./CoreNumber') is not None])
# v['cpus'] = sum([int(cpu.find('./LogicalCpuNumber').text)
# for cpu in report.find('./System/Processor')])
v['cpu_arch'] = 'x86_64'
return {k: v[k] for k in prop_keys} | python | def get_essential_properties(report, prop_keys):
"""get essential properties
This function returns a dictionary which contains keys as in
prop_keys and its values from the report.
:param report: SCCI report element
:param prop_keys: a list of keys for essential properties
:returns: a dictionary which contains keys as in
prop_keys and its values.
"""
v = {}
v['memory_mb'] = int(report.find('./System/Memory/Installed').text)
v['local_gb'] = sum(
[int(int(size.text) / 1024)
for size in report.findall('.//PhysicalDrive/ConfigurableSize')])
v['cpus'] = sum([int(cpu.find('./CoreNumber').text)
for cpu in report.find('./System/Processor')
if cpu.find('./CoreNumber') is not None])
# v['cpus'] = sum([int(cpu.find('./LogicalCpuNumber').text)
# for cpu in report.find('./System/Processor')])
v['cpu_arch'] = 'x86_64'
return {k: v[k] for k in prop_keys} | [
"def",
"get_essential_properties",
"(",
"report",
",",
"prop_keys",
")",
":",
"v",
"=",
"{",
"}",
"v",
"[",
"'memory_mb'",
"]",
"=",
"int",
"(",
"report",
".",
"find",
"(",
"'./System/Memory/Installed'",
")",
".",
"text",
")",
"v",
"[",
"'local_gb'",
"]"... | get essential properties
This function returns a dictionary which contains keys as in
prop_keys and its values from the report.
:param report: SCCI report element
:param prop_keys: a list of keys for essential properties
:returns: a dictionary which contains keys as in
prop_keys and its values. | [
"get",
"essential",
"properties"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L497-L520 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_capabilities_properties | def get_capabilities_properties(d_info,
capa_keys,
gpu_ids,
fpga_ids=None,
**kwargs):
"""get capabilities properties
This function returns a dictionary which contains keys
and their values from the report.
:param d_info: the dictionary of ipmitool parameters for accessing a node.
:param capa_keys: a list of keys for additional capabilities properties.
:param gpu_ids: the list of string contains <vendorID>/<deviceID>
for GPU.
:param fpga_ids: the list of string contains <vendorID>/<deviceID>
for CPU FPGA.
:param kwargs: additional arguments passed to scciclient.
:returns: a dictionary which contains keys and their values.
"""
snmp_client = snmp.SNMPClient(d_info['irmc_address'],
d_info['irmc_snmp_port'],
d_info['irmc_snmp_version'],
d_info['irmc_snmp_community'],
d_info['irmc_snmp_security'])
try:
v = {}
if 'rom_firmware_version' in capa_keys:
v['rom_firmware_version'] = \
snmp.get_bios_firmware_version(snmp_client)
if 'irmc_firmware_version' in capa_keys:
v['irmc_firmware_version'] = \
snmp.get_irmc_firmware_version(snmp_client)
if 'server_model' in capa_keys:
v['server_model'] = snmp.get_server_model(snmp_client)
# Sometime the server started but PCI device list building is
# still in progress so system will response error. We have to wait
# for some more seconds.
if kwargs.get('sleep_flag', False) and \
any(k in capa_keys for k in ('pci_gpu_devices', 'cpu_fpga')):
time.sleep(5)
if 'pci_gpu_devices' in capa_keys:
v['pci_gpu_devices'] = ipmi.get_pci_device(d_info, gpu_ids)
if fpga_ids is not None and 'cpu_fpga' in capa_keys:
v['cpu_fpga'] = ipmi.get_pci_device(d_info, fpga_ids)
if 'trusted_boot' in capa_keys:
v['trusted_boot'] = ipmi.get_tpm_status(d_info)
return v
except (snmp.SNMPFailure, ipmi.IPMIFailure) as err:
raise SCCIClientError('Capabilities inspection failed: %s' % err) | python | def get_capabilities_properties(d_info,
capa_keys,
gpu_ids,
fpga_ids=None,
**kwargs):
"""get capabilities properties
This function returns a dictionary which contains keys
and their values from the report.
:param d_info: the dictionary of ipmitool parameters for accessing a node.
:param capa_keys: a list of keys for additional capabilities properties.
:param gpu_ids: the list of string contains <vendorID>/<deviceID>
for GPU.
:param fpga_ids: the list of string contains <vendorID>/<deviceID>
for CPU FPGA.
:param kwargs: additional arguments passed to scciclient.
:returns: a dictionary which contains keys and their values.
"""
snmp_client = snmp.SNMPClient(d_info['irmc_address'],
d_info['irmc_snmp_port'],
d_info['irmc_snmp_version'],
d_info['irmc_snmp_community'],
d_info['irmc_snmp_security'])
try:
v = {}
if 'rom_firmware_version' in capa_keys:
v['rom_firmware_version'] = \
snmp.get_bios_firmware_version(snmp_client)
if 'irmc_firmware_version' in capa_keys:
v['irmc_firmware_version'] = \
snmp.get_irmc_firmware_version(snmp_client)
if 'server_model' in capa_keys:
v['server_model'] = snmp.get_server_model(snmp_client)
# Sometime the server started but PCI device list building is
# still in progress so system will response error. We have to wait
# for some more seconds.
if kwargs.get('sleep_flag', False) and \
any(k in capa_keys for k in ('pci_gpu_devices', 'cpu_fpga')):
time.sleep(5)
if 'pci_gpu_devices' in capa_keys:
v['pci_gpu_devices'] = ipmi.get_pci_device(d_info, gpu_ids)
if fpga_ids is not None and 'cpu_fpga' in capa_keys:
v['cpu_fpga'] = ipmi.get_pci_device(d_info, fpga_ids)
if 'trusted_boot' in capa_keys:
v['trusted_boot'] = ipmi.get_tpm_status(d_info)
return v
except (snmp.SNMPFailure, ipmi.IPMIFailure) as err:
raise SCCIClientError('Capabilities inspection failed: %s' % err) | [
"def",
"get_capabilities_properties",
"(",
"d_info",
",",
"capa_keys",
",",
"gpu_ids",
",",
"fpga_ids",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"snmp_client",
"=",
"snmp",
".",
"SNMPClient",
"(",
"d_info",
"[",
"'irmc_address'",
"]",
",",
"d_info",
... | get capabilities properties
This function returns a dictionary which contains keys
and their values from the report.
:param d_info: the dictionary of ipmitool parameters for accessing a node.
:param capa_keys: a list of keys for additional capabilities properties.
:param gpu_ids: the list of string contains <vendorID>/<deviceID>
for GPU.
:param fpga_ids: the list of string contains <vendorID>/<deviceID>
for CPU FPGA.
:param kwargs: additional arguments passed to scciclient.
:returns: a dictionary which contains keys and their values. | [
"get",
"capabilities",
"properties"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L523-L580 |
openstack/python-scciclient | scciclient/irmc/scci.py | process_session_status | def process_session_status(irmc_info, session_timeout, upgrade_type):
"""process session for Bios config backup/restore or RAID config operation
:param irmc_info: node info
:param session_timeout: session timeout
:param upgrade_type: flag to check upgrade with bios or irmc
:return: a dict with following values:
{
'upgrade_message': <Message of firmware upgrade mechanism>,
'upgrade_status'
}
"""
session_expiration = time.time() + session_timeout
while time.time() < session_expiration:
try:
# Get session status to check
session = get_firmware_upgrade_status(irmc_info, upgrade_type)
except SCCIClientError:
# Ignore checking during rebooted server
time.sleep(10)
continue
status = session.find("./Value").text
severity = session.find("./Severity").text
message = session.find("./Message").text
result = {}
if severity == 'Information' and status != '0':
if 'FLASH successful' in message:
result['upgrade_status'] = 'Complete'
return result
# Sleep a bit
time.sleep(5)
elif severity == 'Error':
result['upgrade_status'] = 'Error'
return result
else:
# Error occurred, get session log to see what happened
session_log = message
raise SCCIClientError('Failed to set firmware upgrade. '
'Session log is %s.' % session_log)
else:
raise SCCISessionTimeout('Failed to time out mechanism with %s.'
% session_expiration) | python | def process_session_status(irmc_info, session_timeout, upgrade_type):
"""process session for Bios config backup/restore or RAID config operation
:param irmc_info: node info
:param session_timeout: session timeout
:param upgrade_type: flag to check upgrade with bios or irmc
:return: a dict with following values:
{
'upgrade_message': <Message of firmware upgrade mechanism>,
'upgrade_status'
}
"""
session_expiration = time.time() + session_timeout
while time.time() < session_expiration:
try:
# Get session status to check
session = get_firmware_upgrade_status(irmc_info, upgrade_type)
except SCCIClientError:
# Ignore checking during rebooted server
time.sleep(10)
continue
status = session.find("./Value").text
severity = session.find("./Severity").text
message = session.find("./Message").text
result = {}
if severity == 'Information' and status != '0':
if 'FLASH successful' in message:
result['upgrade_status'] = 'Complete'
return result
# Sleep a bit
time.sleep(5)
elif severity == 'Error':
result['upgrade_status'] = 'Error'
return result
else:
# Error occurred, get session log to see what happened
session_log = message
raise SCCIClientError('Failed to set firmware upgrade. '
'Session log is %s.' % session_log)
else:
raise SCCISessionTimeout('Failed to time out mechanism with %s.'
% session_expiration) | [
"def",
"process_session_status",
"(",
"irmc_info",
",",
"session_timeout",
",",
"upgrade_type",
")",
":",
"session_expiration",
"=",
"time",
".",
"time",
"(",
")",
"+",
"session_timeout",
"while",
"time",
".",
"time",
"(",
")",
"<",
"session_expiration",
":",
... | process session for Bios config backup/restore or RAID config operation
:param irmc_info: node info
:param session_timeout: session timeout
:param upgrade_type: flag to check upgrade with bios or irmc
:return: a dict with following values:
{
'upgrade_message': <Message of firmware upgrade mechanism>,
'upgrade_status'
} | [
"process",
"session",
"for",
"Bios",
"config",
"backup",
"/",
"restore",
"or",
"RAID",
"config",
"operation"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L583-L628 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_raid_fgi_status | def get_raid_fgi_status(report):
"""Gather fgi(foreground initialization) information of raid configuration
This function returns a fgi status which contains activity status
and its values from the report.
:param report: SCCI report information
:returns: dict of fgi status of logical_drives, such as Initializing (10%)
or Idle. e.g: {'0': 'Idle', '1': 'Initializing (10%)'}
:raises: SCCIInvalidInputError: fail report input.
SCCIRAIDNotReady: waiting for RAID configuration to complete.
"""
fgi_status = {}
raid_path = "./Software/ServerView/ServerViewRaid"
if not report.find(raid_path):
raise SCCIInvalidInputError(
"ServerView RAID not available in Bare metal Server")
if not report.find(raid_path + "/amEMSV/System/Adapter/LogicalDrive"):
raise SCCIRAIDNotReady(
"RAID configuration not configure in Bare metal Server yet")
logical_drives = report.findall(raid_path +
"/amEMSV/System/Adapter/LogicalDrive")
for logical_drive_name in logical_drives:
status = logical_drive_name.find("./Activity").text
name = logical_drive_name.find("./LogDriveNumber").text
fgi_status.update({name: status})
return fgi_status | python | def get_raid_fgi_status(report):
"""Gather fgi(foreground initialization) information of raid configuration
This function returns a fgi status which contains activity status
and its values from the report.
:param report: SCCI report information
:returns: dict of fgi status of logical_drives, such as Initializing (10%)
or Idle. e.g: {'0': 'Idle', '1': 'Initializing (10%)'}
:raises: SCCIInvalidInputError: fail report input.
SCCIRAIDNotReady: waiting for RAID configuration to complete.
"""
fgi_status = {}
raid_path = "./Software/ServerView/ServerViewRaid"
if not report.find(raid_path):
raise SCCIInvalidInputError(
"ServerView RAID not available in Bare metal Server")
if not report.find(raid_path + "/amEMSV/System/Adapter/LogicalDrive"):
raise SCCIRAIDNotReady(
"RAID configuration not configure in Bare metal Server yet")
logical_drives = report.findall(raid_path +
"/amEMSV/System/Adapter/LogicalDrive")
for logical_drive_name in logical_drives:
status = logical_drive_name.find("./Activity").text
name = logical_drive_name.find("./LogDriveNumber").text
fgi_status.update({name: status})
return fgi_status | [
"def",
"get_raid_fgi_status",
"(",
"report",
")",
":",
"fgi_status",
"=",
"{",
"}",
"raid_path",
"=",
"\"./Software/ServerView/ServerViewRaid\"",
"if",
"not",
"report",
".",
"find",
"(",
"raid_path",
")",
":",
"raise",
"SCCIInvalidInputError",
"(",
"\"ServerView RAI... | Gather fgi(foreground initialization) information of raid configuration
This function returns a fgi status which contains activity status
and its values from the report.
:param report: SCCI report information
:returns: dict of fgi status of logical_drives, such as Initializing (10%)
or Idle. e.g: {'0': 'Idle', '1': 'Initializing (10%)'}
:raises: SCCIInvalidInputError: fail report input.
SCCIRAIDNotReady: waiting for RAID configuration to complete. | [
"Gather",
"fgi",
"(",
"foreground",
"initialization",
")",
"information",
"of",
"raid",
"configuration"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L631-L660 |
openstack/python-scciclient | scciclient/irmc/scci.py | get_firmware_upgrade_status | def get_firmware_upgrade_status(irmc_info, upgrade_type):
"""get firmware upgrade status of bios or irmc
:param irmc_info: dict of iRMC params to access the server node
{
'irmc_address': host,
'irmc_username': user_id,
'irmc_password': password,
'irmc_port': 80 or 443, default is 443,
'irmc_auth_method': 'basic' or 'digest', default is 'digest',
'irmc_client_timeout': timeout, default is 60,
...
}
:param upgrade_type: flag to check upgrade with bios or irmc
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
host = irmc_info.get('irmc_address')
userid = irmc_info.get('irmc_username')
password = irmc_info.get('irmc_password')
port = irmc_info.get('irmc_port', 443)
auth_method = irmc_info.get('irmc_auth_method', 'digest')
client_timeout = irmc_info.get('irmc_client_timeout', 60)
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
if upgrade_type == 'bios':
upgrade_type = '/biosprogress'
elif upgrade_type == 'irmc':
upgrade_type = '/irmcprogress'
r = requests.get(protocol + '://' + host + upgrade_type,
verify=False,
timeout=(10, client_timeout),
allow_redirects=False,
auth=auth_obj)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
upgrade_status_xml = ET.fromstring(r.text)
return upgrade_status_xml
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error) | python | def get_firmware_upgrade_status(irmc_info, upgrade_type):
"""get firmware upgrade status of bios or irmc
:param irmc_info: dict of iRMC params to access the server node
{
'irmc_address': host,
'irmc_username': user_id,
'irmc_password': password,
'irmc_port': 80 or 443, default is 443,
'irmc_auth_method': 'basic' or 'digest', default is 'digest',
'irmc_client_timeout': timeout, default is 60,
...
}
:param upgrade_type: flag to check upgrade with bios or irmc
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed
"""
host = irmc_info.get('irmc_address')
userid = irmc_info.get('irmc_username')
password = irmc_info.get('irmc_password')
port = irmc_info.get('irmc_port', 443)
auth_method = irmc_info.get('irmc_auth_method', 'digest')
client_timeout = irmc_info.get('irmc_client_timeout', 60)
auth_obj = None
try:
protocol = {80: 'http', 443: 'https'}[port]
auth_obj = {
'basic': requests.auth.HTTPBasicAuth(userid, password),
'digest': requests.auth.HTTPDigestAuth(userid, password)
}[auth_method.lower()]
except KeyError:
raise SCCIInvalidInputError(
("Invalid port %(port)d or " +
"auth_method for method %(auth_method)s") %
{'port': port, 'auth_method': auth_method})
try:
if upgrade_type == 'bios':
upgrade_type = '/biosprogress'
elif upgrade_type == 'irmc':
upgrade_type = '/irmcprogress'
r = requests.get(protocol + '://' + host + upgrade_type,
verify=False,
timeout=(10, client_timeout),
allow_redirects=False,
auth=auth_obj)
if r.status_code not in (200, 201):
raise SCCIClientError(
('HTTP PROTOCOL ERROR, STATUS CODE = %s' %
str(r.status_code)))
upgrade_status_xml = ET.fromstring(r.text)
return upgrade_status_xml
except ET.ParseError as parse_error:
raise SCCIClientError(parse_error) | [
"def",
"get_firmware_upgrade_status",
"(",
"irmc_info",
",",
"upgrade_type",
")",
":",
"host",
"=",
"irmc_info",
".",
"get",
"(",
"'irmc_address'",
")",
"userid",
"=",
"irmc_info",
".",
"get",
"(",
"'irmc_username'",
")",
"password",
"=",
"irmc_info",
".",
"ge... | get firmware upgrade status of bios or irmc
:param irmc_info: dict of iRMC params to access the server node
{
'irmc_address': host,
'irmc_username': user_id,
'irmc_password': password,
'irmc_port': 80 or 443, default is 443,
'irmc_auth_method': 'basic' or 'digest', default is 'digest',
'irmc_client_timeout': timeout, default is 60,
...
}
:param upgrade_type: flag to check upgrade with bios or irmc
:raises: ISCCIInvalidInputError if port and/or auth_method params
are invalid
:raises: SCCIClientError if SCCI failed | [
"get",
"firmware",
"upgrade",
"status",
"of",
"bios",
"or",
"irmc"
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/scci.py#L663-L720 |
OSSOS/MOP | src/jjk/preproc/median.py | median | def median(ma):
""" do it row by row, to save memory...."""
_median = 0*ma[0].filled(fill_value=0)
for i in range(ma.shape[-1]):
t=xmedian(ma[:,:,i])
_median[:,i]=t
t=None
return _median | python | def median(ma):
""" do it row by row, to save memory...."""
_median = 0*ma[0].filled(fill_value=0)
for i in range(ma.shape[-1]):
t=xmedian(ma[:,:,i])
_median[:,i]=t
t=None
return _median | [
"def",
"median",
"(",
"ma",
")",
":",
"_median",
"=",
"0",
"*",
"ma",
"[",
"0",
"]",
".",
"filled",
"(",
"fill_value",
"=",
"0",
")",
"for",
"i",
"in",
"range",
"(",
"ma",
".",
"shape",
"[",
"-",
"1",
"]",
")",
":",
"t",
"=",
"xmedian",
"("... | do it row by row, to save memory.... | [
"do",
"it",
"row",
"by",
"row",
"to",
"save",
"memory",
"...."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/median.py#L3-L10 |
OSSOS/MOP | src/jjk/preproc/median.py | xmedian | def xmedian(ma):
""" Given a masked numpy array (build using numpy.ma class) return
the median value of the array."""
import numpy
_medianIndex = numpy.floor(ma.count(axis=0)/2.0)
_sortIndex = ma.argsort(kind='heapsort',axis=0)
_median = ma[0].filled(fill_value=0)*0
for idx in range(len(_sortIndex)):
_median = _median+_sortIndex[idx]*(_medianIndex==idx)
_medianIndex=_median
_median=0*_median
for idx in range(len(ma)):
_median = _median + ma[idx].filled(fill_value=0)*(_medianIndex==idx)
_sortIndex=None
_medianIndex=None
return _median | python | def xmedian(ma):
""" Given a masked numpy array (build using numpy.ma class) return
the median value of the array."""
import numpy
_medianIndex = numpy.floor(ma.count(axis=0)/2.0)
_sortIndex = ma.argsort(kind='heapsort',axis=0)
_median = ma[0].filled(fill_value=0)*0
for idx in range(len(_sortIndex)):
_median = _median+_sortIndex[idx]*(_medianIndex==idx)
_medianIndex=_median
_median=0*_median
for idx in range(len(ma)):
_median = _median + ma[idx].filled(fill_value=0)*(_medianIndex==idx)
_sortIndex=None
_medianIndex=None
return _median | [
"def",
"xmedian",
"(",
"ma",
")",
":",
"import",
"numpy",
"_medianIndex",
"=",
"numpy",
".",
"floor",
"(",
"ma",
".",
"count",
"(",
"axis",
"=",
"0",
")",
"/",
"2.0",
")",
"_sortIndex",
"=",
"ma",
".",
"argsort",
"(",
"kind",
"=",
"'heapsort'",
","... | Given a masked numpy array (build using numpy.ma class) return
the median value of the array. | [
"Given",
"a",
"masked",
"numpy",
"array",
"(",
"build",
"using",
"numpy",
".",
"ma",
"class",
")",
"return",
"the",
"median",
"value",
"of",
"the",
"array",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/median.py#L12-L27 |
OSSOS/MOP | src/ossos/core/ossos/naming.py | _generate_provisional_name | def _generate_provisional_name(q, astrom_header, fits_header):
"""
Generates a name for an object given the information in its astrom
observation header and FITS header.
:param q: a queue of provisional names to return.
:type q: Queue
:param astrom_header:
:param fits_header:
"""
while True:
ef = get_epoch_field(astrom_header, fits_header)
epoch_field = ef[0] + ef[1]
count = storage.increment_object_counter(storage.MEASURE3, epoch_field)
try:
q.put(ef[1] + count)
except:
break | python | def _generate_provisional_name(q, astrom_header, fits_header):
"""
Generates a name for an object given the information in its astrom
observation header and FITS header.
:param q: a queue of provisional names to return.
:type q: Queue
:param astrom_header:
:param fits_header:
"""
while True:
ef = get_epoch_field(astrom_header, fits_header)
epoch_field = ef[0] + ef[1]
count = storage.increment_object_counter(storage.MEASURE3, epoch_field)
try:
q.put(ef[1] + count)
except:
break | [
"def",
"_generate_provisional_name",
"(",
"q",
",",
"astrom_header",
",",
"fits_header",
")",
":",
"while",
"True",
":",
"ef",
"=",
"get_epoch_field",
"(",
"astrom_header",
",",
"fits_header",
")",
"epoch_field",
"=",
"ef",
"[",
"0",
"]",
"+",
"ef",
"[",
"... | Generates a name for an object given the information in its astrom
observation header and FITS header.
:param q: a queue of provisional names to return.
:type q: Queue
:param astrom_header:
:param fits_header: | [
"Generates",
"a",
"name",
"for",
"an",
"object",
"given",
"the",
"information",
"in",
"its",
"astrom",
"observation",
"header",
"and",
"FITS",
"header",
".",
":",
"param",
"q",
":",
"a",
"queue",
"of",
"provisional",
"names",
"to",
"return",
".",
":",
"t... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/naming.py#L9-L25 |
OSSOS/MOP | src/ossos/core/ossos/gui/controllers.py | AbstractController.on_toggle_autoplay_key | def on_toggle_autoplay_key(self):
"""
The user has pressed the keybind for toggling autoplay.
"""
if self.autoplay_manager.is_running():
self.autoplay_manager.stop_autoplay()
self.view.set_autoplay(False)
else:
self.autoplay_manager.start_autoplay()
self.view.set_autoplay(True) | python | def on_toggle_autoplay_key(self):
"""
The user has pressed the keybind for toggling autoplay.
"""
if self.autoplay_manager.is_running():
self.autoplay_manager.stop_autoplay()
self.view.set_autoplay(False)
else:
self.autoplay_manager.start_autoplay()
self.view.set_autoplay(True) | [
"def",
"on_toggle_autoplay_key",
"(",
"self",
")",
":",
"if",
"self",
".",
"autoplay_manager",
".",
"is_running",
"(",
")",
":",
"self",
".",
"autoplay_manager",
".",
"stop_autoplay",
"(",
")",
"self",
".",
"view",
".",
"set_autoplay",
"(",
"False",
")",
"... | The user has pressed the keybind for toggling autoplay. | [
"The",
"user",
"has",
"pressed",
"the",
"keybind",
"for",
"toggling",
"autoplay",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/controllers.py#L161-L170 |
OSSOS/MOP | src/ossos/core/ossos/gui/controllers.py | ProcessRealsController.on_accept | def on_accept(self, auto=False):
"""
Initiates acceptance procedure, gathering required data.
@param auto: Set on_accept to automatic measure of source?
"""
if self.model.is_current_source_named():
provisional_name = self.model.get_current_source_name()
else:
provisional_name = self._generate_provisional_name()
band = self.model.get_current_band()
logger.debug("Got band {} and provisional_name {}".format(band, provisional_name))
default_comment = ""
source_cutout = self.model.get_current_cutout()
if not auto:
result = self.view.ds9.get('imexam key coordinate wcs fk5 degrees')
# result = display.get("""imexam key coordinate $x $y $filename""")
if not isinstance(result, str):
print result
result = str(result)
values = result.split()
ra = Quantity(float(values[1]), unit=units.degree)
dec = Quantity(float(values[2]), unit=units.degree)
key = values[0]
(x, y, hdulist_index) = source_cutout.world2pix(ra, dec, usepv=False)
source_cutout.update_pixel_location((float(x), float(y)), hdulist_index)
source_cutout.reading.inverted = False
(ra, dec) = source_cutout.pix2world(x, y, hdulist_index, usepv=True)
self.place_marker(ra, dec, radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='green', force=True)
else:
key = isinstance(auto, bool) and " " or auto
ra = source_cutout.reading.ra * units.degree
dec = source_cutout.reading.dec * units.degree
self.place_marker(ra, dec, radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='cyan', force=True)
(x, y, hdulist_index) = source_cutout.world2pix(ra, dec, usepv=False)
source_cutout.update_pixel_location((float(x), float(y)), hdulist_index)
# now we've reset the pixel locations, so they are no longer inverted.
source_cutout.reading.inverted = False
marked_skycoord = source_cutout.reading.sky_coord
try:
phot = self.model.get_current_source_observed_magnitude()
cen_x = phot['XCENTER'][0]
cen_y = phot['YCENTER'][0]
obs_mag = phot['MAG'][0]
obs_mag_err = phot['MERR'][0]
if phot.mask[0]['MAG'] or phot.mask[0]['MERR']:
obs_mag = None
obs_mag_err = None
phot_failure = phot['PIER'][0] != 0 or phot.mask[0]['MAG']
sky_failure = phot['SIER'][0] != 0 or phot.mask[0]['MAG']
cen_failure = phot['CIER'][0] != 0
if key != 'h':
source_cutout.update_pixel_location((cen_x, cen_y), hdulist_index)
except Exception as er:
print("DAOPhot failure: {}".format(er))
logger.critical("PHOT ERROR: {}".format(er))
phot_failure = sky_failure = cen_failure = True
obs_mag = None
obs_mag_err = None
band = ''
default_comment = str(er)
obs_mag = phot_failure and None or obs_mag
obs_mag_err = phot_failure and None or obs_mag_err
self.place_marker(source_cutout.ra * units.degree, source_cutout.dec*units.degree,
radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='white',
force=True)
# compare the RA/DEC position of the reading now that we have measured it to the initial value.
if marked_skycoord.separation(source_cutout.reading.sky_coord) > 1 * units.arcsec or cen_failure:
# check if the user wants to use the selected location or the DAOPhot centroid.
self.view.show_offset_source_dialog(source_cutout.reading.sky_coord, marked_skycoord)
note1_default = ""
if self.model.is_current_source_adjusted():
note1_default = config.read("MPC.NOTE1_HAND_ADJUSTED")
else:
for note in config.read("MPC.NOTE1OPTIONS"):
if note.lower().startswith(key):
note1_default = note
break
note1 = len(note1_default) > 0 and note1_default[0] or note1_default
if isinstance(self, ProcessTracksController):
this_observation = mpc.Observation(
null_observation=False,
provisional_name=provisional_name,
note1=note1,
note2=config.read('MPC.NOTE2DEFAULT')[0],
date=self.model.get_current_observation_date(),
ra=self.model.get_current_ra(),
dec=self.model.get_current_dec(),
mag=obs_mag,
mag_err=obs_mag_err,
band=band,
observatory_code=config.read("MPC.DEFAULT_OBSERVATORY_CODE"),
discovery=self.is_discovery,
comment=None,
xpos=source_cutout.reading.x,
ypos=source_cutout.reading.y,
frame=source_cutout.reading.obs.rawname,
astrometric_level=source_cutout.astrom_header.get('ASTLEVEL', None))
try:
previous_observations = self.model.get_writer().get_chronological_buffered_observations()
for idx, observation in enumerate(previous_observations):
try:
if observation.comment.frame.strip() == this_observation.comment.frame.strip():
previous_observations[idx] = this_observation
this_observation = False
break
except Exception as ex:
print type(ex), str(ex)
if this_observation:
previous_observations.append(this_observation)
print Orbfit(previous_observations).summarize()
except Exception as ex:
logger.error(str(type(ex))+" "+str(ex))
print "Failed to compute preliminary orbit."
if obs_mag < 24 and auto is not False:
self.on_do_accept(None,
provisional_name,
sky_failure and "S poor sky" or note1_default,
config.read("MPC.NOTE2DEFAULT"),
self.model.get_current_observation_date(),
self.model.get_current_ra(),
self.model.get_current_dec(),
obs_mag,
obs_mag_err,
band,
config.read("MPC.DEFAULT_OBSERVATORY_CODE"), ""
)
else:
self.view.show_accept_source_dialog(
provisional_name,
self.model.get_current_observation_date(),
self.model.get_current_ra(),
self.model.get_current_dec(),
obs_mag,
obs_mag_err,
band,
note1_choices=config.read("MPC.NOTE1OPTIONS"),
note2_choices=config.read("MPC.NOTE2OPTIONS"),
note1_default=sky_failure and "S poor sky" or note1_default,
note2_default=config.read("MPC.NOTE2DEFAULT"),
default_observatory_code=config.read("MPC.DEFAULT_OBSERVATORY_CODE"),
default_comment=default_comment,
phot_failure=phot_failure,
pixel_x=source_cutout.pixel_x,
pixel_y=source_cutout.pixel_y) | python | def on_accept(self, auto=False):
"""
Initiates acceptance procedure, gathering required data.
@param auto: Set on_accept to automatic measure of source?
"""
if self.model.is_current_source_named():
provisional_name = self.model.get_current_source_name()
else:
provisional_name = self._generate_provisional_name()
band = self.model.get_current_band()
logger.debug("Got band {} and provisional_name {}".format(band, provisional_name))
default_comment = ""
source_cutout = self.model.get_current_cutout()
if not auto:
result = self.view.ds9.get('imexam key coordinate wcs fk5 degrees')
# result = display.get("""imexam key coordinate $x $y $filename""")
if not isinstance(result, str):
print result
result = str(result)
values = result.split()
ra = Quantity(float(values[1]), unit=units.degree)
dec = Quantity(float(values[2]), unit=units.degree)
key = values[0]
(x, y, hdulist_index) = source_cutout.world2pix(ra, dec, usepv=False)
source_cutout.update_pixel_location((float(x), float(y)), hdulist_index)
source_cutout.reading.inverted = False
(ra, dec) = source_cutout.pix2world(x, y, hdulist_index, usepv=True)
self.place_marker(ra, dec, radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='green', force=True)
else:
key = isinstance(auto, bool) and " " or auto
ra = source_cutout.reading.ra * units.degree
dec = source_cutout.reading.dec * units.degree
self.place_marker(ra, dec, radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='cyan', force=True)
(x, y, hdulist_index) = source_cutout.world2pix(ra, dec, usepv=False)
source_cutout.update_pixel_location((float(x), float(y)), hdulist_index)
# now we've reset the pixel locations, so they are no longer inverted.
source_cutout.reading.inverted = False
marked_skycoord = source_cutout.reading.sky_coord
try:
phot = self.model.get_current_source_observed_magnitude()
cen_x = phot['XCENTER'][0]
cen_y = phot['YCENTER'][0]
obs_mag = phot['MAG'][0]
obs_mag_err = phot['MERR'][0]
if phot.mask[0]['MAG'] or phot.mask[0]['MERR']:
obs_mag = None
obs_mag_err = None
phot_failure = phot['PIER'][0] != 0 or phot.mask[0]['MAG']
sky_failure = phot['SIER'][0] != 0 or phot.mask[0]['MAG']
cen_failure = phot['CIER'][0] != 0
if key != 'h':
source_cutout.update_pixel_location((cen_x, cen_y), hdulist_index)
except Exception as er:
print("DAOPhot failure: {}".format(er))
logger.critical("PHOT ERROR: {}".format(er))
phot_failure = sky_failure = cen_failure = True
obs_mag = None
obs_mag_err = None
band = ''
default_comment = str(er)
obs_mag = phot_failure and None or obs_mag
obs_mag_err = phot_failure and None or obs_mag_err
self.place_marker(source_cutout.ra * units.degree, source_cutout.dec*units.degree,
radius=int(source_cutout.apcor.ap_in*0.185+1)*units.arcsec,
colour='white',
force=True)
# compare the RA/DEC position of the reading now that we have measured it to the initial value.
if marked_skycoord.separation(source_cutout.reading.sky_coord) > 1 * units.arcsec or cen_failure:
# check if the user wants to use the selected location or the DAOPhot centroid.
self.view.show_offset_source_dialog(source_cutout.reading.sky_coord, marked_skycoord)
note1_default = ""
if self.model.is_current_source_adjusted():
note1_default = config.read("MPC.NOTE1_HAND_ADJUSTED")
else:
for note in config.read("MPC.NOTE1OPTIONS"):
if note.lower().startswith(key):
note1_default = note
break
note1 = len(note1_default) > 0 and note1_default[0] or note1_default
if isinstance(self, ProcessTracksController):
this_observation = mpc.Observation(
null_observation=False,
provisional_name=provisional_name,
note1=note1,
note2=config.read('MPC.NOTE2DEFAULT')[0],
date=self.model.get_current_observation_date(),
ra=self.model.get_current_ra(),
dec=self.model.get_current_dec(),
mag=obs_mag,
mag_err=obs_mag_err,
band=band,
observatory_code=config.read("MPC.DEFAULT_OBSERVATORY_CODE"),
discovery=self.is_discovery,
comment=None,
xpos=source_cutout.reading.x,
ypos=source_cutout.reading.y,
frame=source_cutout.reading.obs.rawname,
astrometric_level=source_cutout.astrom_header.get('ASTLEVEL', None))
try:
previous_observations = self.model.get_writer().get_chronological_buffered_observations()
for idx, observation in enumerate(previous_observations):
try:
if observation.comment.frame.strip() == this_observation.comment.frame.strip():
previous_observations[idx] = this_observation
this_observation = False
break
except Exception as ex:
print type(ex), str(ex)
if this_observation:
previous_observations.append(this_observation)
print Orbfit(previous_observations).summarize()
except Exception as ex:
logger.error(str(type(ex))+" "+str(ex))
print "Failed to compute preliminary orbit."
if obs_mag < 24 and auto is not False:
self.on_do_accept(None,
provisional_name,
sky_failure and "S poor sky" or note1_default,
config.read("MPC.NOTE2DEFAULT"),
self.model.get_current_observation_date(),
self.model.get_current_ra(),
self.model.get_current_dec(),
obs_mag,
obs_mag_err,
band,
config.read("MPC.DEFAULT_OBSERVATORY_CODE"), ""
)
else:
self.view.show_accept_source_dialog(
provisional_name,
self.model.get_current_observation_date(),
self.model.get_current_ra(),
self.model.get_current_dec(),
obs_mag,
obs_mag_err,
band,
note1_choices=config.read("MPC.NOTE1OPTIONS"),
note2_choices=config.read("MPC.NOTE2OPTIONS"),
note1_default=sky_failure and "S poor sky" or note1_default,
note2_default=config.read("MPC.NOTE2DEFAULT"),
default_observatory_code=config.read("MPC.DEFAULT_OBSERVATORY_CODE"),
default_comment=default_comment,
phot_failure=phot_failure,
pixel_x=source_cutout.pixel_x,
pixel_y=source_cutout.pixel_y) | [
"def",
"on_accept",
"(",
"self",
",",
"auto",
"=",
"False",
")",
":",
"if",
"self",
".",
"model",
".",
"is_current_source_named",
"(",
")",
":",
"provisional_name",
"=",
"self",
".",
"model",
".",
"get_current_source_name",
"(",
")",
"else",
":",
"provisio... | Initiates acceptance procedure, gathering required data.
@param auto: Set on_accept to automatic measure of source? | [
"Initiates",
"acceptance",
"procedure",
"gathering",
"required",
"data",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/controllers.py#L234-L392 |
OSSOS/MOP | src/ossos/core/ossos/gui/controllers.py | ProcessRealsController.on_do_accept | def on_do_accept(self,
minor_planet_number,
provisional_name,
note1,
note2,
date_of_obs,
ra,
dec,
obs_mag,
obs_mag_err,
band,
observatory_code,
comment):
"""
After a source has been mark for acceptance create an MPC Observation record.
@param minor_planet_number: The MPC Number associated with the object
@param provisional_name: A provisional name associated with the object
@param note1: The observational quality note
@param note2: The observational circumstance note
@param date_of_obs: Date of the observation as a Time object.
@param ra: RA in degrees
@param dec: DE in degrees
@param obs_mag: observed magnitude.
@param obs_mag_err: Uncertainty in the observed magnitude.
@param band: filter/band of the observations
@param observatory_code: MPC Observatory Code of telescope.
@param comment: A free form comment (not part of MPC standard record)
"""
# Just extract the character code from the notes, not the
# full description
note1_code = note1.split(" ")[0]
note2_code = note2.split(" ")[0]
self.view.close_accept_source_dialog()
self.model.set_current_source_name(provisional_name)
source_cutout = self.model.get_current_cutout()
mpc_observation = mpc.Observation(
null_observation=False,
provisional_name=provisional_name,
note1=note1_code,
note2=note2_code,
date=date_of_obs,
ra=ra,
dec=dec,
mag=obs_mag,
mag_err=obs_mag_err,
band=band,
observatory_code=observatory_code,
discovery=self.is_discovery,
comment=comment,
xpos=source_cutout.reading.x,
ypos=source_cutout.reading.y,
frame=source_cutout.reading.obs.rawname,
astrometric_level=source_cutout.astrom_header.get('ASTLEVEL', None)
)
# Store the observation into the model.
data = self.model.get_current_workunit().data
key = mpc_observation.comment.frame.strip()
data.mpc_observations[key] = mpc_observation
# And write this observation out.
self.model.get_writer().write(mpc_observation)
# Mark the current item of the work unit as accepted.
self.model.accept_current_item()
# Detemine if the display should be reset.
reset_frame = False
if self.model.get_current_workunit().get_current_source_readings().is_on_last_item():
self.view.clear()
reset_frame = True
self.model.next_item()
if reset_frame:
self.view.frame(1) | python | def on_do_accept(self,
minor_planet_number,
provisional_name,
note1,
note2,
date_of_obs,
ra,
dec,
obs_mag,
obs_mag_err,
band,
observatory_code,
comment):
"""
After a source has been mark for acceptance create an MPC Observation record.
@param minor_planet_number: The MPC Number associated with the object
@param provisional_name: A provisional name associated with the object
@param note1: The observational quality note
@param note2: The observational circumstance note
@param date_of_obs: Date of the observation as a Time object.
@param ra: RA in degrees
@param dec: DE in degrees
@param obs_mag: observed magnitude.
@param obs_mag_err: Uncertainty in the observed magnitude.
@param band: filter/band of the observations
@param observatory_code: MPC Observatory Code of telescope.
@param comment: A free form comment (not part of MPC standard record)
"""
# Just extract the character code from the notes, not the
# full description
note1_code = note1.split(" ")[0]
note2_code = note2.split(" ")[0]
self.view.close_accept_source_dialog()
self.model.set_current_source_name(provisional_name)
source_cutout = self.model.get_current_cutout()
mpc_observation = mpc.Observation(
null_observation=False,
provisional_name=provisional_name,
note1=note1_code,
note2=note2_code,
date=date_of_obs,
ra=ra,
dec=dec,
mag=obs_mag,
mag_err=obs_mag_err,
band=band,
observatory_code=observatory_code,
discovery=self.is_discovery,
comment=comment,
xpos=source_cutout.reading.x,
ypos=source_cutout.reading.y,
frame=source_cutout.reading.obs.rawname,
astrometric_level=source_cutout.astrom_header.get('ASTLEVEL', None)
)
# Store the observation into the model.
data = self.model.get_current_workunit().data
key = mpc_observation.comment.frame.strip()
data.mpc_observations[key] = mpc_observation
# And write this observation out.
self.model.get_writer().write(mpc_observation)
# Mark the current item of the work unit as accepted.
self.model.accept_current_item()
# Detemine if the display should be reset.
reset_frame = False
if self.model.get_current_workunit().get_current_source_readings().is_on_last_item():
self.view.clear()
reset_frame = True
self.model.next_item()
if reset_frame:
self.view.frame(1) | [
"def",
"on_do_accept",
"(",
"self",
",",
"minor_planet_number",
",",
"provisional_name",
",",
"note1",
",",
"note2",
",",
"date_of_obs",
",",
"ra",
",",
"dec",
",",
"obs_mag",
",",
"obs_mag_err",
",",
"band",
",",
"observatory_code",
",",
"comment",
")",
":"... | After a source has been mark for acceptance create an MPC Observation record.
@param minor_planet_number: The MPC Number associated with the object
@param provisional_name: A provisional name associated with the object
@param note1: The observational quality note
@param note2: The observational circumstance note
@param date_of_obs: Date of the observation as a Time object.
@param ra: RA in degrees
@param dec: DE in degrees
@param obs_mag: observed magnitude.
@param obs_mag_err: Uncertainty in the observed magnitude.
@param band: filter/band of the observations
@param observatory_code: MPC Observatory Code of telescope.
@param comment: A free form comment (not part of MPC standard record) | [
"After",
"a",
"source",
"has",
"been",
"mark",
"for",
"acceptance",
"create",
"an",
"MPC",
"Observation",
"record",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/controllers.py#L394-L471 |
OSSOS/MOP | src/ossos/core/ossos/gui/controllers.py | ProcessVettingController.on_do_accept | def on_do_accept(self, comment):
"""
WARNING WARNING: THIS IS ACUTALLY on_do_accept BUT HACKED.
TODO: Make it so that we have a short 'vetting' accept dialogue. Current accept dialogue too heavy for
this part of process, thus the hack.
Process the rejection of a vetting candidate, includes writing a comment to file.
@param comment:
@return:
"""
self.view.close_vetting_accept_source_dialog()
# Set to None if blank
if len(comment.strip()) == 0:
comment = None
writer = self.model.get_writer()
writer.write_source(self.model.get_current_source(), comment=comment, reject=False)
self.model.accept_current_item()
self.view.clear()
self.model.next_item() | python | def on_do_accept(self, comment):
"""
WARNING WARNING: THIS IS ACUTALLY on_do_accept BUT HACKED.
TODO: Make it so that we have a short 'vetting' accept dialogue. Current accept dialogue too heavy for
this part of process, thus the hack.
Process the rejection of a vetting candidate, includes writing a comment to file.
@param comment:
@return:
"""
self.view.close_vetting_accept_source_dialog()
# Set to None if blank
if len(comment.strip()) == 0:
comment = None
writer = self.model.get_writer()
writer.write_source(self.model.get_current_source(), comment=comment, reject=False)
self.model.accept_current_item()
self.view.clear()
self.model.next_item() | [
"def",
"on_do_accept",
"(",
"self",
",",
"comment",
")",
":",
"self",
".",
"view",
".",
"close_vetting_accept_source_dialog",
"(",
")",
"# Set to None if blank",
"if",
"len",
"(",
"comment",
".",
"strip",
"(",
")",
")",
"==",
"0",
":",
"comment",
"=",
"Non... | WARNING WARNING: THIS IS ACUTALLY on_do_accept BUT HACKED.
TODO: Make it so that we have a short 'vetting' accept dialogue. Current accept dialogue too heavy for
this part of process, thus the hack.
Process the rejection of a vetting candidate, includes writing a comment to file.
@param comment:
@return: | [
"WARNING",
"WARNING",
":",
"THIS",
"IS",
"ACUTALLY",
"on_do_accept",
"BUT",
"HACKED",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/controllers.py#L563-L585 |
OSSOS/MOP | src/ossos/core/ossos/gui/controllers.py | ProcessTracksController.on_load_comparison | def on_load_comparison(self, research=False):
"""
Display the comparison image
@param research: find a new comparison image even if one already known?
"""
cutout = self.model.get_current_cutout()
if research:
cutout.comparison_image_index = None
comparison_image = cutout.comparison_image
if comparison_image is None:
print "Failed to load comparison image: {}".format(cutout.comparison_image_list[cutout.comparison_image_index])
else:
self.view.display(cutout.comparison_image, self.use_pixel_coords)
self.view.align(self.model.get_current_cutout(),
self.model.get_current_reading(),
self.model.get_current_source())
self.model.get_current_workunit().previous_obs()
self.model.acknowledge_image_displayed() | python | def on_load_comparison(self, research=False):
"""
Display the comparison image
@param research: find a new comparison image even if one already known?
"""
cutout = self.model.get_current_cutout()
if research:
cutout.comparison_image_index = None
comparison_image = cutout.comparison_image
if comparison_image is None:
print "Failed to load comparison image: {}".format(cutout.comparison_image_list[cutout.comparison_image_index])
else:
self.view.display(cutout.comparison_image, self.use_pixel_coords)
self.view.align(self.model.get_current_cutout(),
self.model.get_current_reading(),
self.model.get_current_source())
self.model.get_current_workunit().previous_obs()
self.model.acknowledge_image_displayed() | [
"def",
"on_load_comparison",
"(",
"self",
",",
"research",
"=",
"False",
")",
":",
"cutout",
"=",
"self",
".",
"model",
".",
"get_current_cutout",
"(",
")",
"if",
"research",
":",
"cutout",
".",
"comparison_image_index",
"=",
"None",
"comparison_image",
"=",
... | Display the comparison image
@param research: find a new comparison image even if one already known? | [
"Display",
"the",
"comparison",
"image"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/controllers.py#L651-L669 |
OSSOS/MOP | src/ossos/core/scripts/step2.py | compute_trans | def compute_trans(expnums, ccd, version, prefix=None, default="WCS"):
"""
Pull the astrometric header for each image, compute an x/y transform and compare to trans.jmp
this one overides trans.jmp if they are very different.
@param expnums:
@param ccd:
@param version:
@param prefix:
@return: None
"""
wcs_dict = {}
for expnum in expnums:
try:
# TODO This assumes that the image is already N/E flipped.
# If compute_trans is called after the image is retrieved from archive then we get the disk version.
filename = storage.get_image(expnum, ccd, version, prefix=prefix)
this_wcs = wcs.WCS(fits.open(filename)[0].header)
except Exception as err:
logging.warning("WCS Trans compute failed. {}".format(str(err)))
return
wcs_dict[expnum] = this_wcs
x0 = wcs_dict[expnums[0]].header['NAXIS1'] / 2.0
y0 = wcs_dict[expnums[0]].header['NAXIS2'] / 2.0
(ra0, dec0) = wcs_dict[expnums[0]].xy2sky(x0, y0)
result = ""
for expnum in expnums:
filename = storage.get_file(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
jmp_trans = file(filename, 'r').readline().split()
(x, y) = wcs_dict[expnum].sky2xy(ra0, dec0)
x1 = float(jmp_trans[0]) + float(jmp_trans[1]) * x + float(jmp_trans[2]) * y
y1 = float(jmp_trans[3]) + float(jmp_trans[4]) * x + float(jmp_trans[5]) * y
dr = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
if dr > 0.5:
result += "WARNING: WCS-JMP transforms mis-matched {} reverting to using {}.\n".format(expnum, default)
if default == "WCS":
uri = storage.dbimages_uri(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
filename = os.path.basename(uri)
trans = file(filename, 'w')
trans.write("{:5.2f} 1. 0. {:5.2f} 0. 1.\n".format(x0 - x, y0 - y))
trans.close()
else:
result += "WCS-JMP transforms match {}\n".format(expnum)
return result | python | def compute_trans(expnums, ccd, version, prefix=None, default="WCS"):
"""
Pull the astrometric header for each image, compute an x/y transform and compare to trans.jmp
this one overides trans.jmp if they are very different.
@param expnums:
@param ccd:
@param version:
@param prefix:
@return: None
"""
wcs_dict = {}
for expnum in expnums:
try:
# TODO This assumes that the image is already N/E flipped.
# If compute_trans is called after the image is retrieved from archive then we get the disk version.
filename = storage.get_image(expnum, ccd, version, prefix=prefix)
this_wcs = wcs.WCS(fits.open(filename)[0].header)
except Exception as err:
logging.warning("WCS Trans compute failed. {}".format(str(err)))
return
wcs_dict[expnum] = this_wcs
x0 = wcs_dict[expnums[0]].header['NAXIS1'] / 2.0
y0 = wcs_dict[expnums[0]].header['NAXIS2'] / 2.0
(ra0, dec0) = wcs_dict[expnums[0]].xy2sky(x0, y0)
result = ""
for expnum in expnums:
filename = storage.get_file(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
jmp_trans = file(filename, 'r').readline().split()
(x, y) = wcs_dict[expnum].sky2xy(ra0, dec0)
x1 = float(jmp_trans[0]) + float(jmp_trans[1]) * x + float(jmp_trans[2]) * y
y1 = float(jmp_trans[3]) + float(jmp_trans[4]) * x + float(jmp_trans[5]) * y
dr = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
if dr > 0.5:
result += "WARNING: WCS-JMP transforms mis-matched {} reverting to using {}.\n".format(expnum, default)
if default == "WCS":
uri = storage.dbimages_uri(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
filename = os.path.basename(uri)
trans = file(filename, 'w')
trans.write("{:5.2f} 1. 0. {:5.2f} 0. 1.\n".format(x0 - x, y0 - y))
trans.close()
else:
result += "WCS-JMP transforms match {}\n".format(expnum)
return result | [
"def",
"compute_trans",
"(",
"expnums",
",",
"ccd",
",",
"version",
",",
"prefix",
"=",
"None",
",",
"default",
"=",
"\"WCS\"",
")",
":",
"wcs_dict",
"=",
"{",
"}",
"for",
"expnum",
"in",
"expnums",
":",
"try",
":",
"# TODO This assumes that the image is alr... | Pull the astrometric header for each image, compute an x/y transform and compare to trans.jmp
this one overides trans.jmp if they are very different.
@param expnums:
@param ccd:
@param version:
@param prefix:
@return: None | [
"Pull",
"the",
"astrometric",
"header",
"for",
"each",
"image",
"compute",
"an",
"x",
"/",
"y",
"transform",
"and",
"compare",
"to",
"trans",
".",
"jmp"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/step2.py#L40-L83 |
OSSOS/MOP | src/ossos/core/scripts/step2.py | step2 | def step2(expnums, ccd, version, prefix=None, dry_run=False, default="WCS"):
"""run the actual step2 on the given exp/ccd combo"""
jmp_trans = ['step2ajmp']
jmp_args = ['step2bjmp']
matt_args = ['step2matt_jmp']
idx = 0
for expnum in expnums:
jmp_args.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp', prefix=prefix)[0:-8]
)
jmp_trans.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp', prefix=prefix)[0:-8]
)
idx += 1
matt_args.append('-f%d' % idx)
matt_args.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.matt', prefix=prefix)[0:-9]
)
logging.info(util.exec_prog(jmp_trans))
if default == "WCS":
logging.info(compute_trans(expnums, ccd, version, prefix, default=default))
logging.info(util.exec_prog(jmp_args))
logging.info(util.exec_prog(matt_args))
## check that the shifts from step2 are rational
check_args = ['checktrans']
if os.access('proc-these-files', os.R_OK):
os.unlink('proc-these-files')
ptf = open('proc-these-files', 'w')
ptf.write("# A dummy file that is created so checktrans could run.\n")
ptf.write("# Frame FWHM PSF?\n")
for expnum in expnums:
filename = os.path.splitext(storage.get_image(expnum, ccd, version=version, prefix=prefix))[0]
if not os.access(filename + ".bright.psf", os.R_OK):
os.link(filename + ".bright.jmp", filename + ".bright.psf")
if not os.access(filename + ".obj.psf", os.R_OK):
os.link(filename + ".obj.jmp", filename + ".obj.psf")
ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(filename,
_FWHM,
"NO"))
ptf.close()
if os.access('BAD_TRANS', os.F_OK):
os.unlink('BAD_TRANS')
logging.info(util.exec_prog(check_args))
if os.access('BAD_TRANS', os.F_OK):
raise OSError(errno.EBADMSG, 'BAD_TRANS')
if os.access('proc-these-files', os.F_OK):
os.unlink('proc-these-files')
if dry_run:
return
for expnum in expnums:
for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
uri = storage.dbimages_uri(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix)
filename = os.path.basename(uri)
storage.copy(filename, uri)
return | python | def step2(expnums, ccd, version, prefix=None, dry_run=False, default="WCS"):
"""run the actual step2 on the given exp/ccd combo"""
jmp_trans = ['step2ajmp']
jmp_args = ['step2bjmp']
matt_args = ['step2matt_jmp']
idx = 0
for expnum in expnums:
jmp_args.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp', prefix=prefix)[0:-8]
)
jmp_trans.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp', prefix=prefix)[0:-8]
)
idx += 1
matt_args.append('-f%d' % idx)
matt_args.append(
storage.get_file(expnum, ccd=ccd, version=version, ext='obj.matt', prefix=prefix)[0:-9]
)
logging.info(util.exec_prog(jmp_trans))
if default == "WCS":
logging.info(compute_trans(expnums, ccd, version, prefix, default=default))
logging.info(util.exec_prog(jmp_args))
logging.info(util.exec_prog(matt_args))
## check that the shifts from step2 are rational
check_args = ['checktrans']
if os.access('proc-these-files', os.R_OK):
os.unlink('proc-these-files')
ptf = open('proc-these-files', 'w')
ptf.write("# A dummy file that is created so checktrans could run.\n")
ptf.write("# Frame FWHM PSF?\n")
for expnum in expnums:
filename = os.path.splitext(storage.get_image(expnum, ccd, version=version, prefix=prefix))[0]
if not os.access(filename + ".bright.psf", os.R_OK):
os.link(filename + ".bright.jmp", filename + ".bright.psf")
if not os.access(filename + ".obj.psf", os.R_OK):
os.link(filename + ".obj.jmp", filename + ".obj.psf")
ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(filename,
_FWHM,
"NO"))
ptf.close()
if os.access('BAD_TRANS', os.F_OK):
os.unlink('BAD_TRANS')
logging.info(util.exec_prog(check_args))
if os.access('BAD_TRANS', os.F_OK):
raise OSError(errno.EBADMSG, 'BAD_TRANS')
if os.access('proc-these-files', os.F_OK):
os.unlink('proc-these-files')
if dry_run:
return
for expnum in expnums:
for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
uri = storage.dbimages_uri(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix)
filename = os.path.basename(uri)
storage.copy(filename, uri)
return | [
"def",
"step2",
"(",
"expnums",
",",
"ccd",
",",
"version",
",",
"prefix",
"=",
"None",
",",
"dry_run",
"=",
"False",
",",
"default",
"=",
"\"WCS\"",
")",
":",
"jmp_trans",
"=",
"[",
"'step2ajmp'",
"]",
"jmp_args",
"=",
"[",
"'step2bjmp'",
"]",
"matt_a... | run the actual step2 on the given exp/ccd combo | [
"run",
"the",
"actual",
"step2",
"on",
"the",
"given",
"exp",
"/",
"ccd",
"combo"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/step2.py#L86-L152 |
OSSOS/MOP | src/ossos/plotting/scripts/plot_aq.py | parse_nate_sims | def parse_nate_sims(path):
'''
parts0.dat) contains the id number, particle fraction (ignore) a, ecc, inc, long. asc., arg. per, and mean anomaly
for every particle in the simulation at t=0.
The second (parts3999.dat) contains the same info at t=3.999 Gyrs for these particles.
:return:
'''
zerostate = pandas.read_table(path + 'parts0.dat', delim_whitespace=True)
endstate = pandas.read_table(path + 'parts3999.dat', delim_whitespace=True)
# add perihelion
zerostate['q'] = zerostate['a'] * (1 - zerostate['e'])
endstate['q'] = endstate['a'] * (1 - endstate['e'])
return zerostate, endstate | python | def parse_nate_sims(path):
'''
parts0.dat) contains the id number, particle fraction (ignore) a, ecc, inc, long. asc., arg. per, and mean anomaly
for every particle in the simulation at t=0.
The second (parts3999.dat) contains the same info at t=3.999 Gyrs for these particles.
:return:
'''
zerostate = pandas.read_table(path + 'parts0.dat', delim_whitespace=True)
endstate = pandas.read_table(path + 'parts3999.dat', delim_whitespace=True)
# add perihelion
zerostate['q'] = zerostate['a'] * (1 - zerostate['e'])
endstate['q'] = endstate['a'] * (1 - endstate['e'])
return zerostate, endstate | [
"def",
"parse_nate_sims",
"(",
"path",
")",
":",
"zerostate",
"=",
"pandas",
".",
"read_table",
"(",
"path",
"+",
"'parts0.dat'",
",",
"delim_whitespace",
"=",
"True",
")",
"endstate",
"=",
"pandas",
".",
"read_table",
"(",
"path",
"+",
"'parts3999.dat'",
",... | parts0.dat) contains the id number, particle fraction (ignore) a, ecc, inc, long. asc., arg. per, and mean anomaly
for every particle in the simulation at t=0.
The second (parts3999.dat) contains the same info at t=3.999 Gyrs for these particles.
:return: | [
"parts0",
".",
"dat",
")",
"contains",
"the",
"id",
"number",
"particle",
"fraction",
"(",
"ignore",
")",
"a",
"ecc",
"inc",
"long",
".",
"asc",
".",
"arg",
".",
"per",
"and",
"mean",
"anomaly",
"for",
"every",
"particle",
"in",
"the",
"simulation",
"a... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/plotting/scripts/plot_aq.py#L17-L32 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | _convert_netmask | def _convert_netmask(mask):
"""Convert netmask from CIDR format(integer) to doted decimal string."""
if mask not in range(0, 33):
raise scci.SCCIInvalidInputError(
'Netmask value is invalid.')
return socket.inet_ntoa(struct.pack(
'!L', int('1' * mask + '0' * (32 - mask), 2))) | python | def _convert_netmask(mask):
"""Convert netmask from CIDR format(integer) to doted decimal string."""
if mask not in range(0, 33):
raise scci.SCCIInvalidInputError(
'Netmask value is invalid.')
return socket.inet_ntoa(struct.pack(
'!L', int('1' * mask + '0' * (32 - mask), 2))) | [
"def",
"_convert_netmask",
"(",
"mask",
")",
":",
"if",
"mask",
"not",
"in",
"range",
"(",
"0",
",",
"33",
")",
":",
"raise",
"scci",
".",
"SCCIInvalidInputError",
"(",
"'Netmask value is invalid.'",
")",
"return",
"socket",
".",
"inet_ntoa",
"(",
"struct",
... | Convert netmask from CIDR format(integer) to doted decimal string. | [
"Convert",
"netmask",
"from",
"CIDR",
"format",
"(",
"integer",
")",
"to",
"doted",
"decimal",
"string",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L249-L256 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration.apply | def apply(self, reboot=False):
"""Apply the configuration to iRMC."""
self.root.use_virtual_addresses = True
self.root.manage.manage = True
self.root.mode = 'new'
self.root.init_boot = reboot
self.client.set_profile(self.root.get_json()) | python | def apply(self, reboot=False):
"""Apply the configuration to iRMC."""
self.root.use_virtual_addresses = True
self.root.manage.manage = True
self.root.mode = 'new'
self.root.init_boot = reboot
self.client.set_profile(self.root.get_json()) | [
"def",
"apply",
"(",
"self",
",",
"reboot",
"=",
"False",
")",
":",
"self",
".",
"root",
".",
"use_virtual_addresses",
"=",
"True",
"self",
".",
"root",
".",
"manage",
".",
"manage",
"=",
"True",
"self",
".",
"root",
".",
"mode",
"=",
"'new'",
"self"... | Apply the configuration to iRMC. | [
"Apply",
"the",
"configuration",
"to",
"iRMC",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L268-L275 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration.terminate | def terminate(self, reboot=False):
"""Delete VIOM configuration from iRMC."""
self.root.manage.manage = False
self.root.mode = 'delete'
self.root.init_boot = reboot
self.client.set_profile(self.root.get_json()) | python | def terminate(self, reboot=False):
"""Delete VIOM configuration from iRMC."""
self.root.manage.manage = False
self.root.mode = 'delete'
self.root.init_boot = reboot
self.client.set_profile(self.root.get_json()) | [
"def",
"terminate",
"(",
"self",
",",
"reboot",
"=",
"False",
")",
":",
"self",
".",
"root",
".",
"manage",
".",
"manage",
"=",
"False",
"self",
".",
"root",
".",
"mode",
"=",
"'delete'",
"self",
".",
"root",
".",
"init_boot",
"=",
"reboot",
"self",
... | Delete VIOM configuration from iRMC. | [
"Delete",
"VIOM",
"configuration",
"from",
"iRMC",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L277-L282 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration.set_lan_port | def set_lan_port(self, port_id, mac=None):
"""Set LAN port information to configuration.
:param port_id: Physical port ID.
:param mac: virtual MAC address if virtualization is necessary.
"""
port_handler = _parse_physical_port_id(port_id)
port = self._find_port(port_handler)
if port:
port_handler.set_lan_port(port, mac)
else:
self._add_port(port_handler, port_handler.create_lan_port(mac)) | python | def set_lan_port(self, port_id, mac=None):
"""Set LAN port information to configuration.
:param port_id: Physical port ID.
:param mac: virtual MAC address if virtualization is necessary.
"""
port_handler = _parse_physical_port_id(port_id)
port = self._find_port(port_handler)
if port:
port_handler.set_lan_port(port, mac)
else:
self._add_port(port_handler, port_handler.create_lan_port(mac)) | [
"def",
"set_lan_port",
"(",
"self",
",",
"port_id",
",",
"mac",
"=",
"None",
")",
":",
"port_handler",
"=",
"_parse_physical_port_id",
"(",
"port_id",
")",
"port",
"=",
"self",
".",
"_find_port",
"(",
"port_handler",
")",
"if",
"port",
":",
"port_handler",
... | Set LAN port information to configuration.
:param port_id: Physical port ID.
:param mac: virtual MAC address if virtualization is necessary. | [
"Set",
"LAN",
"port",
"information",
"to",
"configuration",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L312-L323 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration.set_iscsi_volume | def set_iscsi_volume(self, port_id,
initiator_iqn, initiator_dhcp=False,
initiator_ip=None, initiator_netmask=None,
target_dhcp=False, target_iqn=None, target_ip=None,
target_port=3260, target_lun=0, boot_prio=1,
chap_user=None, chap_secret=None,
mutual_chap_secret=None):
"""Set iSCSI volume information to configuration.
:param port_id: Physical port ID.
:param initiator_iqn: IQN of initiator.
:param initiator_dhcp: True if DHCP is used in the iSCSI network.
:param initiator_ip: IP address of initiator. None if DHCP is used.
:param initiator_netmask: Netmask of initiator as integer. None if
DHCP is used.
:param target_dhcp: True if DHCP is used for iSCSI target.
:param target_iqn: IQN of target. None if DHCP is used.
:param target_ip: IP address of target. None if DHCP is used.
:param target_port: Port number of target. None if DHCP is used.
:param target_lun: LUN number of target. None if DHCP is used,
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
"""
initiator_netmask = (_convert_netmask(initiator_netmask)
if initiator_netmask else None)
port_handler = _parse_physical_port_id(port_id)
iscsi_boot = _create_iscsi_boot(
initiator_iqn,
initiator_dhcp=initiator_dhcp,
initiator_ip=initiator_ip,
initiator_netmask=initiator_netmask,
target_dhcp=target_dhcp,
target_iqn=target_iqn,
target_ip=target_ip,
target_port=target_port,
target_lun=target_lun,
boot_prio=boot_prio,
chap_user=chap_user,
chap_secret=chap_secret,
mutual_chap_secret=mutual_chap_secret)
port = self._find_port(port_handler)
if port:
port_handler.set_iscsi_port(port, iscsi_boot)
else:
port = port_handler.create_iscsi_port(iscsi_boot)
self._add_port(port_handler, port) | python | def set_iscsi_volume(self, port_id,
initiator_iqn, initiator_dhcp=False,
initiator_ip=None, initiator_netmask=None,
target_dhcp=False, target_iqn=None, target_ip=None,
target_port=3260, target_lun=0, boot_prio=1,
chap_user=None, chap_secret=None,
mutual_chap_secret=None):
"""Set iSCSI volume information to configuration.
:param port_id: Physical port ID.
:param initiator_iqn: IQN of initiator.
:param initiator_dhcp: True if DHCP is used in the iSCSI network.
:param initiator_ip: IP address of initiator. None if DHCP is used.
:param initiator_netmask: Netmask of initiator as integer. None if
DHCP is used.
:param target_dhcp: True if DHCP is used for iSCSI target.
:param target_iqn: IQN of target. None if DHCP is used.
:param target_ip: IP address of target. None if DHCP is used.
:param target_port: Port number of target. None if DHCP is used.
:param target_lun: LUN number of target. None if DHCP is used,
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
"""
initiator_netmask = (_convert_netmask(initiator_netmask)
if initiator_netmask else None)
port_handler = _parse_physical_port_id(port_id)
iscsi_boot = _create_iscsi_boot(
initiator_iqn,
initiator_dhcp=initiator_dhcp,
initiator_ip=initiator_ip,
initiator_netmask=initiator_netmask,
target_dhcp=target_dhcp,
target_iqn=target_iqn,
target_ip=target_ip,
target_port=target_port,
target_lun=target_lun,
boot_prio=boot_prio,
chap_user=chap_user,
chap_secret=chap_secret,
mutual_chap_secret=mutual_chap_secret)
port = self._find_port(port_handler)
if port:
port_handler.set_iscsi_port(port, iscsi_boot)
else:
port = port_handler.create_iscsi_port(iscsi_boot)
self._add_port(port_handler, port) | [
"def",
"set_iscsi_volume",
"(",
"self",
",",
"port_id",
",",
"initiator_iqn",
",",
"initiator_dhcp",
"=",
"False",
",",
"initiator_ip",
"=",
"None",
",",
"initiator_netmask",
"=",
"None",
",",
"target_dhcp",
"=",
"False",
",",
"target_iqn",
"=",
"None",
",",
... | Set iSCSI volume information to configuration.
:param port_id: Physical port ID.
:param initiator_iqn: IQN of initiator.
:param initiator_dhcp: True if DHCP is used in the iSCSI network.
:param initiator_ip: IP address of initiator. None if DHCP is used.
:param initiator_netmask: Netmask of initiator as integer. None if
DHCP is used.
:param target_dhcp: True if DHCP is used for iSCSI target.
:param target_iqn: IQN of target. None if DHCP is used.
:param target_ip: IP address of target. None if DHCP is used.
:param target_port: Port number of target. None if DHCP is used.
:param target_lun: LUN number of target. None if DHCP is used,
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority. | [
"Set",
"iSCSI",
"volume",
"information",
"to",
"configuration",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L325-L373 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration.set_fc_volume | def set_fc_volume(self, port_id,
target_wwn, target_lun=0, boot_prio=1,
initiator_wwnn=None, initiator_wwpn=None):
"""Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary.
"""
port_handler = _parse_physical_port_id(port_id)
fc_target = elcm.FCTarget(target_wwn, target_lun)
fc_boot = elcm.FCBoot(boot_prio=boot_prio, boot_enable=True)
fc_boot.add_target(fc_target)
port = self._find_port(port_handler)
if port:
port_handler.set_fc_port(port, fc_boot,
wwnn=initiator_wwnn, wwpn=initiator_wwpn)
else:
port = port_handler.create_fc_port(fc_boot,
wwnn=initiator_wwnn,
wwpn=initiator_wwpn)
self._add_port(port_handler, port) | python | def set_fc_volume(self, port_id,
target_wwn, target_lun=0, boot_prio=1,
initiator_wwnn=None, initiator_wwpn=None):
"""Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary.
"""
port_handler = _parse_physical_port_id(port_id)
fc_target = elcm.FCTarget(target_wwn, target_lun)
fc_boot = elcm.FCBoot(boot_prio=boot_prio, boot_enable=True)
fc_boot.add_target(fc_target)
port = self._find_port(port_handler)
if port:
port_handler.set_fc_port(port, fc_boot,
wwnn=initiator_wwnn, wwpn=initiator_wwpn)
else:
port = port_handler.create_fc_port(fc_boot,
wwnn=initiator_wwnn,
wwpn=initiator_wwpn)
self._add_port(port_handler, port) | [
"def",
"set_fc_volume",
"(",
"self",
",",
"port_id",
",",
"target_wwn",
",",
"target_lun",
"=",
"0",
",",
"boot_prio",
"=",
"1",
",",
"initiator_wwnn",
"=",
"None",
",",
"initiator_wwpn",
"=",
"None",
")",
":",
"port_handler",
"=",
"_parse_physical_port_id",
... | Set FibreChannel volume information to configuration.
:param port_id: Physical port ID.
:param target_wwn: WWN of target.
:param target_lun: LUN number of target.
:param boot_prio: Boot priority of the volume. 1 indicates the highest
priority.
:param initiator_wwnn: Virtual WWNN for initiator if necessary.
:param initiator_wwpn: Virtual WWPN for initiator if necessary. | [
"Set",
"FibreChannel",
"volume",
"information",
"to",
"configuration",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L375-L401 |
openstack/python-scciclient | scciclient/irmc/viom/client.py | VIOMConfiguration._pad_former_ports | def _pad_former_ports(self, port_handler):
"""Create ports with former port index.
:param port_handler: Port information to be registered.
Depending on slot type and card type, it is necessary to register
LAN ports with former index to VIOM table.
"""
if not port_handler.need_padding():
return
for port_idx in range(1, port_handler.port_idx):
pad_handler = port_handler.__class__(
port_handler.slot_type,
port_handler.card_type,
port_handler.slot_idx,
port_handler.card_idx,
port_idx)
if not self._find_port(pad_handler):
self._add_port(pad_handler,
pad_handler.create_lan_port()) | python | def _pad_former_ports(self, port_handler):
"""Create ports with former port index.
:param port_handler: Port information to be registered.
Depending on slot type and card type, it is necessary to register
LAN ports with former index to VIOM table.
"""
if not port_handler.need_padding():
return
for port_idx in range(1, port_handler.port_idx):
pad_handler = port_handler.__class__(
port_handler.slot_type,
port_handler.card_type,
port_handler.slot_idx,
port_handler.card_idx,
port_idx)
if not self._find_port(pad_handler):
self._add_port(pad_handler,
pad_handler.create_lan_port()) | [
"def",
"_pad_former_ports",
"(",
"self",
",",
"port_handler",
")",
":",
"if",
"not",
"port_handler",
".",
"need_padding",
"(",
")",
":",
"return",
"for",
"port_idx",
"in",
"range",
"(",
"1",
",",
"port_handler",
".",
"port_idx",
")",
":",
"pad_handler",
"=... | Create ports with former port index.
:param port_handler: Port information to be registered.
Depending on slot type and card type, it is necessary to register
LAN ports with former index to VIOM table. | [
"Create",
"ports",
"with",
"former",
"port",
"index",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/client.py#L411-L430 |
JohnVinyard/zounds | zounds/soundfile/byte_depth.py | chunk_size_samples | def chunk_size_samples(sf, buf):
"""
Black magic to account for the fact that libsndfile's behavior varies
depending on file format when using the virtual io api.
If you ask for more samples from an ogg or flac file than are available
at that moment, libsndfile will give you no more samples ever, even if
more bytes arrive in the buffer later.
"""
byte_depth = _lookup[sf.subtype]
channels = sf.channels
bytes_per_second = byte_depth * sf.samplerate * channels
secs = len(buf) / bytes_per_second
secs = max(1, secs - 6)
return int(secs * sf.samplerate) | python | def chunk_size_samples(sf, buf):
"""
Black magic to account for the fact that libsndfile's behavior varies
depending on file format when using the virtual io api.
If you ask for more samples from an ogg or flac file than are available
at that moment, libsndfile will give you no more samples ever, even if
more bytes arrive in the buffer later.
"""
byte_depth = _lookup[sf.subtype]
channels = sf.channels
bytes_per_second = byte_depth * sf.samplerate * channels
secs = len(buf) / bytes_per_second
secs = max(1, secs - 6)
return int(secs * sf.samplerate) | [
"def",
"chunk_size_samples",
"(",
"sf",
",",
"buf",
")",
":",
"byte_depth",
"=",
"_lookup",
"[",
"sf",
".",
"subtype",
"]",
"channels",
"=",
"sf",
".",
"channels",
"bytes_per_second",
"=",
"byte_depth",
"*",
"sf",
".",
"samplerate",
"*",
"channels",
"secs"... | Black magic to account for the fact that libsndfile's behavior varies
depending on file format when using the virtual io api.
If you ask for more samples from an ogg or flac file than are available
at that moment, libsndfile will give you no more samples ever, even if
more bytes arrive in the buffer later. | [
"Black",
"magic",
"to",
"account",
"for",
"the",
"fact",
"that",
"libsndfile",
"s",
"behavior",
"varies",
"depending",
"on",
"file",
"format",
"when",
"using",
"the",
"virtual",
"io",
"api",
"."
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/soundfile/byte_depth.py#L19-L33 |
OSSOS/MOP | src/ossos/core/ossos/coding.py | encode | def encode(number, alphabet):
"""
Converts an integer to a base n string where n is the length of the
provided alphabet.
Modified from http://en.wikipedia.org/wiki/Base_36
"""
if not isinstance(number, (int, long)):
raise TypeError("Number must be an integer.")
base_n = ""
sign = ""
if number < 0:
sign = "-"
number = -number
if 0 <= number < len(alphabet):
return sign + alphabet[number]
while number != 0:
number, i = divmod(number, len(alphabet))
base_n = alphabet[i] + base_n
return sign + base_n | python | def encode(number, alphabet):
"""
Converts an integer to a base n string where n is the length of the
provided alphabet.
Modified from http://en.wikipedia.org/wiki/Base_36
"""
if not isinstance(number, (int, long)):
raise TypeError("Number must be an integer.")
base_n = ""
sign = ""
if number < 0:
sign = "-"
number = -number
if 0 <= number < len(alphabet):
return sign + alphabet[number]
while number != 0:
number, i = divmod(number, len(alphabet))
base_n = alphabet[i] + base_n
return sign + base_n | [
"def",
"encode",
"(",
"number",
",",
"alphabet",
")",
":",
"if",
"not",
"isinstance",
"(",
"number",
",",
"(",
"int",
",",
"long",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"Number must be an integer.\"",
")",
"base_n",
"=",
"\"\"",
"sign",
"=",
"\"\""... | Converts an integer to a base n string where n is the length of the
provided alphabet.
Modified from http://en.wikipedia.org/wiki/Base_36 | [
"Converts",
"an",
"integer",
"to",
"a",
"base",
"n",
"string",
"where",
"n",
"is",
"the",
"length",
"of",
"the",
"provided",
"alphabet",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/coding.py#L17-L41 |
JohnVinyard/zounds | zounds/timeseries/functional.py | categorical | def categorical(x, mu=255, normalize=True):
"""
Mu-law compress a block of audio samples, and convert them into a
categorical distribution
"""
if normalize:
# normalize the signal
mx = x.max()
x = np.divide(x, mx, where=mx != 0)
# mu law compression
x = mu_law(x)
# translate and scale to [0, 1]
x = (x - x.min()) * 0.5
# convert to the range [0, 255]
x = (x * mu).astype(np.uint8)
# create the array to house the categorical representation
c = np.zeros((np.product(x.shape), mu + 1), dtype=np.uint8)
c[np.arange(len(c)), x.flatten()] = 1
return ArrayWithUnits(
c.reshape(x.shape + (mu + 1,)),
x.dimensions + (IdentityDimension(),)) | python | def categorical(x, mu=255, normalize=True):
"""
Mu-law compress a block of audio samples, and convert them into a
categorical distribution
"""
if normalize:
# normalize the signal
mx = x.max()
x = np.divide(x, mx, where=mx != 0)
# mu law compression
x = mu_law(x)
# translate and scale to [0, 1]
x = (x - x.min()) * 0.5
# convert to the range [0, 255]
x = (x * mu).astype(np.uint8)
# create the array to house the categorical representation
c = np.zeros((np.product(x.shape), mu + 1), dtype=np.uint8)
c[np.arange(len(c)), x.flatten()] = 1
return ArrayWithUnits(
c.reshape(x.shape + (mu + 1,)),
x.dimensions + (IdentityDimension(),)) | [
"def",
"categorical",
"(",
"x",
",",
"mu",
"=",
"255",
",",
"normalize",
"=",
"True",
")",
":",
"if",
"normalize",
":",
"# normalize the signal",
"mx",
"=",
"x",
".",
"max",
"(",
")",
"x",
"=",
"np",
".",
"divide",
"(",
"x",
",",
"mx",
",",
"wher... | Mu-law compress a block of audio samples, and convert them into a
categorical distribution | [
"Mu",
"-",
"law",
"compress",
"a",
"block",
"of",
"audio",
"samples",
"and",
"convert",
"them",
"into",
"a",
"categorical",
"distribution"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/timeseries/functional.py#L7-L33 |
JohnVinyard/zounds | zounds/timeseries/functional.py | inverse_categorical | def inverse_categorical(x, mu=255):
"""
Invert categorical samples
"""
flat = x.reshape((-1, x.shape[-1]))
indices = np.argmax(flat, axis=1).astype(np.float32)
indices = (indices / mu) - 0.5
inverted = inverse_mu_law(indices, mu=mu).reshape(x.shape[:-1])
return ArrayWithUnits(inverted, x.dimensions[:2]) | python | def inverse_categorical(x, mu=255):
"""
Invert categorical samples
"""
flat = x.reshape((-1, x.shape[-1]))
indices = np.argmax(flat, axis=1).astype(np.float32)
indices = (indices / mu) - 0.5
inverted = inverse_mu_law(indices, mu=mu).reshape(x.shape[:-1])
return ArrayWithUnits(inverted, x.dimensions[:2]) | [
"def",
"inverse_categorical",
"(",
"x",
",",
"mu",
"=",
"255",
")",
":",
"flat",
"=",
"x",
".",
"reshape",
"(",
"(",
"-",
"1",
",",
"x",
".",
"shape",
"[",
"-",
"1",
"]",
")",
")",
"indices",
"=",
"np",
".",
"argmax",
"(",
"flat",
",",
"axis"... | Invert categorical samples | [
"Invert",
"categorical",
"samples"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/timeseries/functional.py#L37-L45 |
JohnVinyard/zounds | zounds/synthesize/synthesize.py | SineSynthesizer.synthesize | def synthesize(self, duration, freqs_in_hz=[440.]):
"""
Synthesize one or more sine waves
Args:
duration (numpy.timdelta64): The duration of the sound to be
synthesized
freqs_in_hz (list of float): Numbers representing the frequencies
in hz that should be synthesized
"""
freqs = np.array(freqs_in_hz)
scaling = 1 / len(freqs)
sr = int(self.samplerate)
cps = freqs / sr
ts = (duration / Seconds(1)) * sr
ranges = np.array([np.arange(0, ts * c, c) for c in cps])
raw = (np.sin(ranges * (2 * np.pi)) * scaling).sum(axis=0)
return AudioSamples(raw, self.samplerate) | python | def synthesize(self, duration, freqs_in_hz=[440.]):
"""
Synthesize one or more sine waves
Args:
duration (numpy.timdelta64): The duration of the sound to be
synthesized
freqs_in_hz (list of float): Numbers representing the frequencies
in hz that should be synthesized
"""
freqs = np.array(freqs_in_hz)
scaling = 1 / len(freqs)
sr = int(self.samplerate)
cps = freqs / sr
ts = (duration / Seconds(1)) * sr
ranges = np.array([np.arange(0, ts * c, c) for c in cps])
raw = (np.sin(ranges * (2 * np.pi)) * scaling).sum(axis=0)
return AudioSamples(raw, self.samplerate) | [
"def",
"synthesize",
"(",
"self",
",",
"duration",
",",
"freqs_in_hz",
"=",
"[",
"440.",
"]",
")",
":",
"freqs",
"=",
"np",
".",
"array",
"(",
"freqs_in_hz",
")",
"scaling",
"=",
"1",
"/",
"len",
"(",
"freqs",
")",
"sr",
"=",
"int",
"(",
"self",
... | Synthesize one or more sine waves
Args:
duration (numpy.timdelta64): The duration of the sound to be
synthesized
freqs_in_hz (list of float): Numbers representing the frequencies
in hz that should be synthesized | [
"Synthesize",
"one",
"or",
"more",
"sine",
"waves"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/synthesize/synthesize.py#L546-L563 |
JohnVinyard/zounds | zounds/synthesize/synthesize.py | TickSynthesizer.synthesize | def synthesize(self, duration, tick_frequency):
"""
Synthesize periodic "ticks", generated from white noise and an envelope
Args:
duration (numpy.timedelta64): The total duration of the sound to be
synthesized
tick_frequency (numpy.timedelta64): The frequency of the ticking
sound
"""
sr = self.samplerate.samples_per_second
# create a short, tick sound
tick = np.random.uniform(low=-1., high=1., size=int(sr * .1))
tick *= np.linspace(1, 0, len(tick))
# create silence
samples = np.zeros(int(sr * (duration / Seconds(1))))
ticks_per_second = Seconds(1) / tick_frequency
# introduce periodic ticking sound
step = int(sr // ticks_per_second)
for i in range(0, len(samples), step):
size = len(samples[i:i + len(tick)])
samples[i:i + len(tick)] += tick[:size]
return AudioSamples(samples, self.samplerate) | python | def synthesize(self, duration, tick_frequency):
"""
Synthesize periodic "ticks", generated from white noise and an envelope
Args:
duration (numpy.timedelta64): The total duration of the sound to be
synthesized
tick_frequency (numpy.timedelta64): The frequency of the ticking
sound
"""
sr = self.samplerate.samples_per_second
# create a short, tick sound
tick = np.random.uniform(low=-1., high=1., size=int(sr * .1))
tick *= np.linspace(1, 0, len(tick))
# create silence
samples = np.zeros(int(sr * (duration / Seconds(1))))
ticks_per_second = Seconds(1) / tick_frequency
# introduce periodic ticking sound
step = int(sr // ticks_per_second)
for i in range(0, len(samples), step):
size = len(samples[i:i + len(tick)])
samples[i:i + len(tick)] += tick[:size]
return AudioSamples(samples, self.samplerate) | [
"def",
"synthesize",
"(",
"self",
",",
"duration",
",",
"tick_frequency",
")",
":",
"sr",
"=",
"self",
".",
"samplerate",
".",
"samples_per_second",
"# create a short, tick sound",
"tick",
"=",
"np",
".",
"random",
".",
"uniform",
"(",
"low",
"=",
"-",
"1.",... | Synthesize periodic "ticks", generated from white noise and an envelope
Args:
duration (numpy.timedelta64): The total duration of the sound to be
synthesized
tick_frequency (numpy.timedelta64): The frequency of the ticking
sound | [
"Synthesize",
"periodic",
"ticks",
"generated",
"from",
"white",
"noise",
"and",
"an",
"envelope"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/synthesize/synthesize.py#L593-L615 |
JohnVinyard/zounds | zounds/synthesize/synthesize.py | NoiseSynthesizer.synthesize | def synthesize(self, duration):
"""
Synthesize white noise
Args:
duration (numpy.timedelta64): The duration of the synthesized sound
"""
sr = self.samplerate.samples_per_second
seconds = duration / Seconds(1)
samples = np.random.uniform(low=-1., high=1., size=int(sr * seconds))
return AudioSamples(samples, self.samplerate) | python | def synthesize(self, duration):
"""
Synthesize white noise
Args:
duration (numpy.timedelta64): The duration of the synthesized sound
"""
sr = self.samplerate.samples_per_second
seconds = duration / Seconds(1)
samples = np.random.uniform(low=-1., high=1., size=int(sr * seconds))
return AudioSamples(samples, self.samplerate) | [
"def",
"synthesize",
"(",
"self",
",",
"duration",
")",
":",
"sr",
"=",
"self",
".",
"samplerate",
".",
"samples_per_second",
"seconds",
"=",
"duration",
"/",
"Seconds",
"(",
"1",
")",
"samples",
"=",
"np",
".",
"random",
".",
"uniform",
"(",
"low",
"=... | Synthesize white noise
Args:
duration (numpy.timedelta64): The duration of the synthesized sound | [
"Synthesize",
"white",
"noise"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/synthesize/synthesize.py#L644-L654 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | TracksWorkUnit.query_ssos | def query_ssos(self):
"""
Use the MPC file that has been built up in processing this work
unit to generate another workunit.
"""
self._ssos_queried = True
mpc_filename = self.save()
return self.builder.build_workunit(mpc_filename) | python | def query_ssos(self):
"""
Use the MPC file that has been built up in processing this work
unit to generate another workunit.
"""
self._ssos_queried = True
mpc_filename = self.save()
return self.builder.build_workunit(mpc_filename) | [
"def",
"query_ssos",
"(",
"self",
")",
":",
"self",
".",
"_ssos_queried",
"=",
"True",
"mpc_filename",
"=",
"self",
".",
"save",
"(",
")",
"return",
"self",
".",
"builder",
".",
"build_workunit",
"(",
"mpc_filename",
")"
] | Use the MPC file that has been built up in processing this work
unit to generate another workunit. | [
"Use",
"the",
"MPC",
"file",
"that",
"has",
"been",
"built",
"up",
"in",
"processing",
"this",
"work",
"unit",
"to",
"generate",
"another",
"workunit",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L410-L417 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | TracksWorkUnit.save | def save(self):
"""
Update the SouceReading information for the currently recorded observations and then flush those to a file.
@return: mpc_filename of the resulting save.
"""
self.get_writer().flush()
mpc_filename = self.get_writer().get_filename()
self.get_writer().close()
self._writer = None
return mpc_filename | python | def save(self):
"""
Update the SouceReading information for the currently recorded observations and then flush those to a file.
@return: mpc_filename of the resulting save.
"""
self.get_writer().flush()
mpc_filename = self.get_writer().get_filename()
self.get_writer().close()
self._writer = None
return mpc_filename | [
"def",
"save",
"(",
"self",
")",
":",
"self",
".",
"get_writer",
"(",
")",
".",
"flush",
"(",
")",
"mpc_filename",
"=",
"self",
".",
"get_writer",
"(",
")",
".",
"get_filename",
"(",
")",
"self",
".",
"get_writer",
"(",
")",
".",
"close",
"(",
")",... | Update the SouceReading information for the currently recorded observations and then flush those to a file.
@return: mpc_filename of the resulting save. | [
"Update",
"the",
"SouceReading",
"information",
"for",
"the",
"currently",
"recorded",
"observations",
"and",
"then",
"flush",
"those",
"to",
"a",
"file",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L419-L428 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | TracksWorkUnit.get_writer | def get_writer(self):
"""
Get a writer.
This method also makes the output filename be the same as the .track file but with .mpc.
(Currently only works on local filesystem)
:rtype MPCWriter
"""
if self._writer is None:
suffix = tasks.get_suffix(tasks.TRACK_TASK)
try:
base_name = re.search("(?P<base_name>.*?)\.\d*{}".format(suffix), self.filename).group('base_name')
except:
base_name = os.path.splitext(self.filename)[0]
mpc_filename_pattern = self.output_context.get_full_path(
"{}.?{}".format(base_name, suffix))
mpc_file_count = len(glob(mpc_filename_pattern))
mpc_filename = "{}.{}{}".format(base_name, mpc_file_count, suffix)
self._writer = self._create_writer(mpc_filename)
return self._writer | python | def get_writer(self):
"""
Get a writer.
This method also makes the output filename be the same as the .track file but with .mpc.
(Currently only works on local filesystem)
:rtype MPCWriter
"""
if self._writer is None:
suffix = tasks.get_suffix(tasks.TRACK_TASK)
try:
base_name = re.search("(?P<base_name>.*?)\.\d*{}".format(suffix), self.filename).group('base_name')
except:
base_name = os.path.splitext(self.filename)[0]
mpc_filename_pattern = self.output_context.get_full_path(
"{}.?{}".format(base_name, suffix))
mpc_file_count = len(glob(mpc_filename_pattern))
mpc_filename = "{}.{}{}".format(base_name, mpc_file_count, suffix)
self._writer = self._create_writer(mpc_filename)
return self._writer | [
"def",
"get_writer",
"(",
"self",
")",
":",
"if",
"self",
".",
"_writer",
"is",
"None",
":",
"suffix",
"=",
"tasks",
".",
"get_suffix",
"(",
"tasks",
".",
"TRACK_TASK",
")",
"try",
":",
"base_name",
"=",
"re",
".",
"search",
"(",
"\"(?P<base_name>.*?)\\.... | Get a writer.
This method also makes the output filename be the same as the .track file but with .mpc.
(Currently only works on local filesystem)
:rtype MPCWriter | [
"Get",
"a",
"writer",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L466-L486 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | WorkUnitProvider._filter | def _filter(self, filename):
"""
return 'true' if filename doesn't match name_filter regex and should be filtered out of the list.
@param filename:
@return:
"""
return self.name_filter is not None and re.search(self.name_filter, filename) is None | python | def _filter(self, filename):
"""
return 'true' if filename doesn't match name_filter regex and should be filtered out of the list.
@param filename:
@return:
"""
return self.name_filter is not None and re.search(self.name_filter, filename) is None | [
"def",
"_filter",
"(",
"self",
",",
"filename",
")",
":",
"return",
"self",
".",
"name_filter",
"is",
"not",
"None",
"and",
"re",
".",
"search",
"(",
"self",
".",
"name_filter",
",",
"filename",
")",
"is",
"None"
] | return 'true' if filename doesn't match name_filter regex and should be filtered out of the list.
@param filename:
@return: | [
"return",
"true",
"if",
"filename",
"doesn",
"t",
"match",
"name_filter",
"regex",
"and",
"should",
"be",
"filtered",
"out",
"of",
"the",
"list",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L571-L577 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | WorkUnitProvider.get_workunit | def get_workunit(self, ignore_list=None):
"""
Gets a new unit of work.
Args:
ignore_list: list(str)
A list of filenames which should be ignored. Defaults to None.
Returns:
new_workunit: WorkUnit
A new unit of work that has not yet been processed. A lock on
it has been acquired.
Raises:
NoAvailableWorkException
There is no more work available.
"""
if ignore_list is None:
ignore_list = []
potential_files = self.get_potential_files(ignore_list)
while len(potential_files) > 0:
potential_file = self.select_potential_file(potential_files)
potential_files.remove(potential_file)
if self._filter(potential_file):
continue
if self.directory_context.get_file_size(potential_file) == 0:
continue
if self.progress_manager.is_done(potential_file):
self._done.append(potential_file)
continue
else:
try:
self.progress_manager.lock(potential_file)
except FileLockedException:
continue
self._already_fetched.append(potential_file)
return self.builder.build_workunit(
self.directory_context.get_full_path(potential_file))
logger.info("No eligible workunits remain to be fetched.")
raise NoAvailableWorkException() | python | def get_workunit(self, ignore_list=None):
"""
Gets a new unit of work.
Args:
ignore_list: list(str)
A list of filenames which should be ignored. Defaults to None.
Returns:
new_workunit: WorkUnit
A new unit of work that has not yet been processed. A lock on
it has been acquired.
Raises:
NoAvailableWorkException
There is no more work available.
"""
if ignore_list is None:
ignore_list = []
potential_files = self.get_potential_files(ignore_list)
while len(potential_files) > 0:
potential_file = self.select_potential_file(potential_files)
potential_files.remove(potential_file)
if self._filter(potential_file):
continue
if self.directory_context.get_file_size(potential_file) == 0:
continue
if self.progress_manager.is_done(potential_file):
self._done.append(potential_file)
continue
else:
try:
self.progress_manager.lock(potential_file)
except FileLockedException:
continue
self._already_fetched.append(potential_file)
return self.builder.build_workunit(
self.directory_context.get_full_path(potential_file))
logger.info("No eligible workunits remain to be fetched.")
raise NoAvailableWorkException() | [
"def",
"get_workunit",
"(",
"self",
",",
"ignore_list",
"=",
"None",
")",
":",
"if",
"ignore_list",
"is",
"None",
":",
"ignore_list",
"=",
"[",
"]",
"potential_files",
"=",
"self",
".",
"get_potential_files",
"(",
"ignore_list",
")",
"while",
"len",
"(",
"... | Gets a new unit of work.
Args:
ignore_list: list(str)
A list of filenames which should be ignored. Defaults to None.
Returns:
new_workunit: WorkUnit
A new unit of work that has not yet been processed. A lock on
it has been acquired.
Raises:
NoAvailableWorkException
There is no more work available. | [
"Gets",
"a",
"new",
"unit",
"of",
"work",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L579-L625 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | WorkUnitProvider.get_potential_files | def get_potential_files(self, ignore_list):
"""
Get a listing of files for the appropriate task which may or may
not be locked and/or done.
"""
exclude_prefix = self.taskid == tasks.suffixes.get(tasks.REALS_TASK, '') and 'fk' or None
filenames = [filename for filename in
self.directory_context.get_listing(self.taskid, exclude_prefix=exclude_prefix)
if filename not in ignore_list and
filename not in self._done and
filename not in self._already_fetched]
# if the extension is .mpc. then we look for the largest numbered MPC file.
# look for the largest numbered MPC file only.
if self.taskid == tasks.suffixes.get(tasks.TRACK_TASK, ''):
basenames = {}
for filename in filenames:
fullname = os.path.splitext(filename)[0]
if fullname in basenames:
continue
basename = os.path.splitext(fullname)[0]
# only do the 'maximum' search when the 2nd extension is an integer value
try:
idx = int(filename.split('.')[-2])
if idx > basenames.get(basename, 0):
basenames[basename] = idx
except:
# since we failed, just keep the file in the list
basenames[fullname] = ''
filenames = []
for basename in basenames:
# sometimes the version is empty, so no '.' is needed
version = basenames[basename]
version = len(str(version)) > 0 and ".{}".format(version) or version
filenames.append("{}{}{}".format(basename, version, self.taskid))
# print basename, basenames[basename], filenames[-1]
return filenames | python | def get_potential_files(self, ignore_list):
"""
Get a listing of files for the appropriate task which may or may
not be locked and/or done.
"""
exclude_prefix = self.taskid == tasks.suffixes.get(tasks.REALS_TASK, '') and 'fk' or None
filenames = [filename for filename in
self.directory_context.get_listing(self.taskid, exclude_prefix=exclude_prefix)
if filename not in ignore_list and
filename not in self._done and
filename not in self._already_fetched]
# if the extension is .mpc. then we look for the largest numbered MPC file.
# look for the largest numbered MPC file only.
if self.taskid == tasks.suffixes.get(tasks.TRACK_TASK, ''):
basenames = {}
for filename in filenames:
fullname = os.path.splitext(filename)[0]
if fullname in basenames:
continue
basename = os.path.splitext(fullname)[0]
# only do the 'maximum' search when the 2nd extension is an integer value
try:
idx = int(filename.split('.')[-2])
if idx > basenames.get(basename, 0):
basenames[basename] = idx
except:
# since we failed, just keep the file in the list
basenames[fullname] = ''
filenames = []
for basename in basenames:
# sometimes the version is empty, so no '.' is needed
version = basenames[basename]
version = len(str(version)) > 0 and ".{}".format(version) or version
filenames.append("{}{}{}".format(basename, version, self.taskid))
# print basename, basenames[basename], filenames[-1]
return filenames | [
"def",
"get_potential_files",
"(",
"self",
",",
"ignore_list",
")",
":",
"exclude_prefix",
"=",
"self",
".",
"taskid",
"==",
"tasks",
".",
"suffixes",
".",
"get",
"(",
"tasks",
".",
"REALS_TASK",
",",
"''",
")",
"and",
"'fk'",
"or",
"None",
"filenames",
... | Get a listing of files for the appropriate task which may or may
not be locked and/or done. | [
"Get",
"a",
"listing",
"of",
"files",
"for",
"the",
"appropriate",
"task",
"which",
"may",
"or",
"may",
"not",
"be",
"locked",
"and",
"/",
"or",
"done",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L627-L663 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/workload.py | TracksWorkUnitBuilder.move_discovery_to_front | def move_discovery_to_front(self, data):
"""
Moves the discovery triplet to the front of the reading list.
Leaves everything else in the same order.
"""
readings = self.get_readings(data)
discovery_index = self.get_discovery_index(data)
reordered_readings = (readings[discovery_index:discovery_index + 3] +
readings[:discovery_index] +
readings[discovery_index + 3:])
self.set_readings(data, reordered_readings) | python | def move_discovery_to_front(self, data):
"""
Moves the discovery triplet to the front of the reading list.
Leaves everything else in the same order.
"""
readings = self.get_readings(data)
discovery_index = self.get_discovery_index(data)
reordered_readings = (readings[discovery_index:discovery_index + 3] +
readings[:discovery_index] +
readings[discovery_index + 3:])
self.set_readings(data, reordered_readings) | [
"def",
"move_discovery_to_front",
"(",
"self",
",",
"data",
")",
":",
"readings",
"=",
"self",
".",
"get_readings",
"(",
"data",
")",
"discovery_index",
"=",
"self",
".",
"get_discovery_index",
"(",
"data",
")",
"reordered_readings",
"=",
"(",
"readings",
"[",... | Moves the discovery triplet to the front of the reading list.
Leaves everything else in the same order. | [
"Moves",
"the",
"discovery",
"triplet",
"to",
"the",
"front",
"of",
"the",
"reading",
"list",
".",
"Leaves",
"everything",
"else",
"in",
"the",
"same",
"order",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/workload.py#L832-L844 |
OSSOS/MOP | src/ossos/core/ossos/planning/megacam.py | TAPQuery | def TAPQuery(RAdeg=180.0, DECdeg=0.0, width=1, height=1):
"""Do a query of the CADC Megacam table. Get all observations insize the box. Returns a file-like object"""
QUERY =( """ SELECT """
""" COORD1(CENTROID(Plane.position_bounds)) AS "RAJ2000", COORD2(CENTROID(Plane.position_bounds)) AS "DEJ2000", Plane.time_bounds_lower as "MJDATE" """
""" FROM """
""" caom2.Observation as o JOIN caom2.Plane as Plane on o.obsID=Plane.obsID """
""" WHERE """
""" o.collection = 'CFHT' """
""" AND o.instrument_name = 'MegaPrime' """
""" AND INTERSECTS( BOX('ICRS', {}, {}, {}, {}), Plane.position_bounds ) = 1 """
""" AND ( o.proposal_id LIKE '%P05') """)
# """ AND ( o.proposal_id LIKE '%P05' OR o.proposal_id LIKE '%L03' or o.proposal_id LIKE '%L06' or o.proposal_id
# in ( '06AF33', '06BF98' ) ) """ )
QUERY = QUERY.format( RAdeg, DECdeg, width, height)
data={"QUERY": QUERY,
"REQUEST": "doQuery",
"LANG": "ADQL",
"FORMAT": "votable"}
url="http://www.cadc.hia.nrc.gc.ca/tap/sync"
print url, data
return urllib.urlopen(url,urllib.urlencode(data)) | python | def TAPQuery(RAdeg=180.0, DECdeg=0.0, width=1, height=1):
"""Do a query of the CADC Megacam table. Get all observations insize the box. Returns a file-like object"""
QUERY =( """ SELECT """
""" COORD1(CENTROID(Plane.position_bounds)) AS "RAJ2000", COORD2(CENTROID(Plane.position_bounds)) AS "DEJ2000", Plane.time_bounds_lower as "MJDATE" """
""" FROM """
""" caom2.Observation as o JOIN caom2.Plane as Plane on o.obsID=Plane.obsID """
""" WHERE """
""" o.collection = 'CFHT' """
""" AND o.instrument_name = 'MegaPrime' """
""" AND INTERSECTS( BOX('ICRS', {}, {}, {}, {}), Plane.position_bounds ) = 1 """
""" AND ( o.proposal_id LIKE '%P05') """)
# """ AND ( o.proposal_id LIKE '%P05' OR o.proposal_id LIKE '%L03' or o.proposal_id LIKE '%L06' or o.proposal_id
# in ( '06AF33', '06BF98' ) ) """ )
QUERY = QUERY.format( RAdeg, DECdeg, width, height)
data={"QUERY": QUERY,
"REQUEST": "doQuery",
"LANG": "ADQL",
"FORMAT": "votable"}
url="http://www.cadc.hia.nrc.gc.ca/tap/sync"
print url, data
return urllib.urlopen(url,urllib.urlencode(data)) | [
"def",
"TAPQuery",
"(",
"RAdeg",
"=",
"180.0",
",",
"DECdeg",
"=",
"0.0",
",",
"width",
"=",
"1",
",",
"height",
"=",
"1",
")",
":",
"QUERY",
"=",
"(",
"\"\"\" SELECT \"\"\"",
"\"\"\" COORD1(CENTROID(Plane.position_bounds)) AS \"RAJ2000\", COORD2(CENTROID(Plane.positi... | Do a query of the CADC Megacam table. Get all observations insize the box. Returns a file-like object | [
"Do",
"a",
"query",
"of",
"the",
"CADC",
"Megacam",
"table",
".",
"Get",
"all",
"observations",
"insize",
"the",
"box",
".",
"Returns",
"a",
"file",
"-",
"like",
"object"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/megacam.py#L5-L32 |
OSSOS/MOP | src/ossos/utils/check_mkpsf.py | check_tags | def check_tags(my_expnum, ops_set, my_ccds, dry_run=True):
"""
check the tags for the given expnum/ccd set.
@param ops:
@param my_expnum:
@return:
"""
tags = storage.get_tags(my_expnum)
count = 0
outcount = 0
fails = []
for ccd in my_ccds:
success = True
count += 1
for ops in ops_set:
for fake in ops[0]:
for my_program in ops[1]:
for version in ops[2]:
#print my_expnum, fake, my_program, version, ccd
key = storage.get_process_tag(fake + my_program, ccd, version)
uri = storage.tag_uri(key)
if "Failed to get image" in tags.get(uri, "Failed to get image"):
#print tags.get(uri, None)
fails.append(ccd)
success = False
if success:
outcount += 1
sys.stderr.write("{} {} {:5.1f}%\n".format(outcount, count,100* float(outcount)/count))
#print fails
return set(fails) | python | def check_tags(my_expnum, ops_set, my_ccds, dry_run=True):
"""
check the tags for the given expnum/ccd set.
@param ops:
@param my_expnum:
@return:
"""
tags = storage.get_tags(my_expnum)
count = 0
outcount = 0
fails = []
for ccd in my_ccds:
success = True
count += 1
for ops in ops_set:
for fake in ops[0]:
for my_program in ops[1]:
for version in ops[2]:
#print my_expnum, fake, my_program, version, ccd
key = storage.get_process_tag(fake + my_program, ccd, version)
uri = storage.tag_uri(key)
if "Failed to get image" in tags.get(uri, "Failed to get image"):
#print tags.get(uri, None)
fails.append(ccd)
success = False
if success:
outcount += 1
sys.stderr.write("{} {} {:5.1f}%\n".format(outcount, count,100* float(outcount)/count))
#print fails
return set(fails) | [
"def",
"check_tags",
"(",
"my_expnum",
",",
"ops_set",
",",
"my_ccds",
",",
"dry_run",
"=",
"True",
")",
":",
"tags",
"=",
"storage",
".",
"get_tags",
"(",
"my_expnum",
")",
"count",
"=",
"0",
"outcount",
"=",
"0",
"fails",
"=",
"[",
"]",
"for",
"ccd... | check the tags for the given expnum/ccd set.
@param ops:
@param my_expnum:
@return: | [
"check",
"the",
"tags",
"for",
"the",
"given",
"expnum",
"/",
"ccd",
"set",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/check_mkpsf.py#L28-L57 |
OSSOS/MOP | src/jjk/preproc/verifyDetection.py | get_flipped_ext | def get_flipped_ext(file_id,ccd):
"""Given a list of exposure numbers and CCD, get them from the DB"""
import MOPfits
import os, shutil
filename=MOPfits.adGet(file_id,extno=int(ccd))
if int(ccd)<18:
tfname=filename+"F"
shutil.move(filename, tfname)
os.system("imcopy %s[-*,-*] %s" % (tfname, filename))
os.unlink(tfname)
if not os.access(filename,os.R_OK):
return(None)
return(filename) | python | def get_flipped_ext(file_id,ccd):
"""Given a list of exposure numbers and CCD, get them from the DB"""
import MOPfits
import os, shutil
filename=MOPfits.adGet(file_id,extno=int(ccd))
if int(ccd)<18:
tfname=filename+"F"
shutil.move(filename, tfname)
os.system("imcopy %s[-*,-*] %s" % (tfname, filename))
os.unlink(tfname)
if not os.access(filename,os.R_OK):
return(None)
return(filename) | [
"def",
"get_flipped_ext",
"(",
"file_id",
",",
"ccd",
")",
":",
"import",
"MOPfits",
"import",
"os",
",",
"shutil",
"filename",
"=",
"MOPfits",
".",
"adGet",
"(",
"file_id",
",",
"extno",
"=",
"int",
"(",
"ccd",
")",
")",
"if",
"int",
"(",
"ccd",
")"... | Given a list of exposure numbers and CCD, get them from the DB | [
"Given",
"a",
"list",
"of",
"exposure",
"numbers",
"and",
"CCD",
"get",
"them",
"from",
"the",
"DB"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/verifyDetection.py#L17-L31 |
OSSOS/MOP | src/jjk/preproc/verifyDetection.py | get_file_ids | def get_file_ids(object):
"""Get the exposure for a particular line in the meausre table"""
import MOPdbaccess
mysql = MOPdbaccess.connect('cfeps','cfhls',dbSystem='MYSQL')
cfeps=mysql.cursor()
sql="SELECT file_id FROM measure WHERE provisional LIKE %s"
cfeps.execute(sql,(object, ))
file_ids=cfeps.fetchall()
return (file_ids) | python | def get_file_ids(object):
"""Get the exposure for a particular line in the meausre table"""
import MOPdbaccess
mysql = MOPdbaccess.connect('cfeps','cfhls',dbSystem='MYSQL')
cfeps=mysql.cursor()
sql="SELECT file_id FROM measure WHERE provisional LIKE %s"
cfeps.execute(sql,(object, ))
file_ids=cfeps.fetchall()
return (file_ids) | [
"def",
"get_file_ids",
"(",
"object",
")",
":",
"import",
"MOPdbaccess",
"mysql",
"=",
"MOPdbaccess",
".",
"connect",
"(",
"'cfeps'",
",",
"'cfhls'",
",",
"dbSystem",
"=",
"'MYSQL'",
")",
"cfeps",
"=",
"mysql",
".",
"cursor",
"(",
")",
"sql",
"=",
"\"SEL... | Get the exposure for a particular line in the meausre table | [
"Get",
"the",
"exposure",
"for",
"a",
"particular",
"line",
"in",
"the",
"meausre",
"table"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/verifyDetection.py#L34-L43 |
OSSOS/MOP | src/ossos/core/scripts/update_header.py | main | def main():
"""Do the script."""
parser = argparse.ArgumentParser(
description='replace image header')
parser.add_argument('--extname',
help='name of extension to in header')
parser.add_argument('expnum', type=str,
help='exposure to update')
parser.add_argument('-r', '--replace',
action='store_true',
help='store modified image back to VOSpace?')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--force', action='store_true', help="Re-run even if previous success recorded")
parser.add_argument('--dbimages', help="VOSpace DATA storage area.", default="vos:OSSOS/dbimages")
args = parser.parse_args()
task = util.task()
dependency = 'preproc'
prefix = ""
storage.DBIMAGES = args.dbimages
level = logging.CRITICAL
message_format = "%(message)s"
if args.verbose:
level = logging.INFO
if args.debug:
level = logging.DEBUG
message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
logging.basicConfig(level=level, format=message_format)
storage.set_logger(task, prefix, args.expnum, None, None, False)
message = storage.SUCCESS
expnum = args.expnum
exit_status = 0
try:
# skip if already succeeded and not in force mode
if storage.get_status(task, prefix, expnum, "p", 36) and not args.force:
logging.info("Already updated, skipping")
sys.exit(0)
image_hdulist = storage.get_image(args.expnum, return_file=False)
ast_hdulist = storage.get_astheader(expnum, ccd=None)
run_update_header(image_hdulist, ast_hdulist)
image_filename = os.path.basename(storage.get_uri(expnum))
image_hdulist.writeto(image_filename)
if args.replace:
dest = storage.dbimages_uri(expnum)
storage.copy(image_filename, dest)
storage.set_status('update_header', "", expnum, 'p', 36, message)
except Exception as e:
message = str(e)
if args.replace:
storage.set_status(task, prefix, expnum, 'p', 36, message)
exit_status = message
logging.error(message)
return exit_status | python | def main():
"""Do the script."""
parser = argparse.ArgumentParser(
description='replace image header')
parser.add_argument('--extname',
help='name of extension to in header')
parser.add_argument('expnum', type=str,
help='exposure to update')
parser.add_argument('-r', '--replace',
action='store_true',
help='store modified image back to VOSpace?')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--force', action='store_true', help="Re-run even if previous success recorded")
parser.add_argument('--dbimages', help="VOSpace DATA storage area.", default="vos:OSSOS/dbimages")
args = parser.parse_args()
task = util.task()
dependency = 'preproc'
prefix = ""
storage.DBIMAGES = args.dbimages
level = logging.CRITICAL
message_format = "%(message)s"
if args.verbose:
level = logging.INFO
if args.debug:
level = logging.DEBUG
message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
logging.basicConfig(level=level, format=message_format)
storage.set_logger(task, prefix, args.expnum, None, None, False)
message = storage.SUCCESS
expnum = args.expnum
exit_status = 0
try:
# skip if already succeeded and not in force mode
if storage.get_status(task, prefix, expnum, "p", 36) and not args.force:
logging.info("Already updated, skipping")
sys.exit(0)
image_hdulist = storage.get_image(args.expnum, return_file=False)
ast_hdulist = storage.get_astheader(expnum, ccd=None)
run_update_header(image_hdulist, ast_hdulist)
image_filename = os.path.basename(storage.get_uri(expnum))
image_hdulist.writeto(image_filename)
if args.replace:
dest = storage.dbimages_uri(expnum)
storage.copy(image_filename, dest)
storage.set_status('update_header', "", expnum, 'p', 36, message)
except Exception as e:
message = str(e)
if args.replace:
storage.set_status(task, prefix, expnum, 'p', 36, message)
exit_status = message
logging.error(message)
return exit_status | [
"def",
"main",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'replace image header'",
")",
"parser",
".",
"add_argument",
"(",
"'--extname'",
",",
"help",
"=",
"'name of extension to in header'",
")",
"parser",
".",
"... | Do the script. | [
"Do",
"the",
"script",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/update_header.py#L48-L108 |
eelregit/mcfit | mcfit/kernels.py | _deriv | def _deriv(UK, deriv):
"""Real deriv is to :math:`t`, complex deriv is to :math:`\ln t`"""
if deriv == 0:
return UK
if isinstance(deriv, complex):
def UKderiv(z):
return (-z) ** deriv.imag * UK(z)
return UKderiv
def UKderiv(z):
poly = arange(deriv) + 1
poly = poly - z if ndim(z) == 0 else poly - z.reshape(-1, 1)
poly = poly.prod(axis=-1)
return poly * UK(z - deriv)
return UKderiv | python | def _deriv(UK, deriv):
"""Real deriv is to :math:`t`, complex deriv is to :math:`\ln t`"""
if deriv == 0:
return UK
if isinstance(deriv, complex):
def UKderiv(z):
return (-z) ** deriv.imag * UK(z)
return UKderiv
def UKderiv(z):
poly = arange(deriv) + 1
poly = poly - z if ndim(z) == 0 else poly - z.reshape(-1, 1)
poly = poly.prod(axis=-1)
return poly * UK(z - deriv)
return UKderiv | [
"def",
"_deriv",
"(",
"UK",
",",
"deriv",
")",
":",
"if",
"deriv",
"==",
"0",
":",
"return",
"UK",
"if",
"isinstance",
"(",
"deriv",
",",
"complex",
")",
":",
"def",
"UKderiv",
"(",
"z",
")",
":",
"return",
"(",
"-",
"z",
")",
"**",
"deriv",
".... | Real deriv is to :math:`t`, complex deriv is to :math:`\ln t` | [
"Real",
"deriv",
"is",
"to",
":",
"math",
":",
"t",
"complex",
"deriv",
"is",
"to",
":",
"math",
":",
"\\",
"ln",
"t"
] | train | https://github.com/eelregit/mcfit/blob/ef04b92df929425c44c62743c1ce7e0b81a26815/mcfit/kernels.py#L9-L24 |
OSSOS/MOP | src/ossos/core/ossos/planning/invariable.py | convert | def convert(lat, lon):
"""convert lat/lon from the ecliptic to the invariable plane."""
x = numpy.cos(lon) * numpy.cos(lat)
y = numpy.sin(lon) * numpy.cos(lat)
z = numpy.sin(lat)
# Invariable plane: values in arcseconds.
epsilon = 5713.86
omega = 387390.8
coseps = numpy.cos(epsilon * secrad)
sineps = numpy.sin(epsilon * secrad)
cosom = numpy.cos(omega * secrad)
sinom = numpy.sin(omega * secrad)
xi = x * cosom + y * sinom
yi = coseps * (-sinom * x + cosom * y) + sineps * z
zi = - sineps * (-sinom * x + cosom * y) + coseps * z
lat = numpy.arcsin(zi)
lon = numpy.arctan2(yi, xi)
return (lat, lon) | python | def convert(lat, lon):
"""convert lat/lon from the ecliptic to the invariable plane."""
x = numpy.cos(lon) * numpy.cos(lat)
y = numpy.sin(lon) * numpy.cos(lat)
z = numpy.sin(lat)
# Invariable plane: values in arcseconds.
epsilon = 5713.86
omega = 387390.8
coseps = numpy.cos(epsilon * secrad)
sineps = numpy.sin(epsilon * secrad)
cosom = numpy.cos(omega * secrad)
sinom = numpy.sin(omega * secrad)
xi = x * cosom + y * sinom
yi = coseps * (-sinom * x + cosom * y) + sineps * z
zi = - sineps * (-sinom * x + cosom * y) + coseps * z
lat = numpy.arcsin(zi)
lon = numpy.arctan2(yi, xi)
return (lat, lon) | [
"def",
"convert",
"(",
"lat",
",",
"lon",
")",
":",
"x",
"=",
"numpy",
".",
"cos",
"(",
"lon",
")",
"*",
"numpy",
".",
"cos",
"(",
"lat",
")",
"y",
"=",
"numpy",
".",
"sin",
"(",
"lon",
")",
"*",
"numpy",
".",
"cos",
"(",
"lat",
")",
"z",
... | convert lat/lon from the ecliptic to the invariable plane. | [
"convert",
"lat",
"/",
"lon",
"from",
"the",
"ecliptic",
"to",
"the",
"invariable",
"plane",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/invariable.py#L6-L28 |
OSSOS/MOP | src/jjk/preproc/s1.py | searchTriples | def searchTriples(expnums,ccd):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
return(-1)
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
### Some program Constants
proc_file = open("proc-these-files","w")
proc_file.write("# Files to be planted and searched\n")
proc_file.write("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout=None
filename=MOPfits.adGet(str(expnum)+"p",extno=int(ccd),cutout=cutout)
print filename
if not os.access(filename,os.R_OK):
return(-3)
filename=os.path.splitext(filename)
filenames.append(filename[0])
proc_file.write("%s %f %s \n" % ( filename[0], fwhm/0.183, "no"))
proc_file.flush()
proc_file.close()
command="find.pl -p '' -d ./ "
sys.stderr.write(command)
try:
os.system(command)
except:
sys.stderr.write("Failed while running find")
file_extens=[
"cands.comb",
"measure3.cands.astrom",
"measure3.WARNING",
"measure3.astrom.scatter"]
if os.access("find.OK",os.R_OK):
os.system("touch /home/cadc/kavelaar/results/05AQ06B/"+filenames[0]+".OK")
else:
os.system("touch /home/cadc/kavelaar/results/05AQ06B/"+filenames[0]+".FAILED")
### look for the cand.comb file and store in the DB
import shutil
for ext in file_extens:
if os.access(filenames[0]+"."+ext,os.R_OK):
shutil.copy(filenames[0]+"."+ext,"/home/cadc/kavelaar/results/05AQ06B")
astrom=filenames[0]+".measure3.cands.astrom"
print astrom
cmd = "mpc_gen.pl -c "+astrom
print os.access(astrom,os.R_OK)
if os.access(astrom,os.R_OK):
print cmd
os.system(cmd)
os.system("mpcIngest.pl *.MPC")
os.system("cp *.MPC /home/cadc/kavelaar/results/05AQ06B")
return(1)
return(0) | python | def searchTriples(expnums,ccd):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
return(-1)
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
### Some program Constants
proc_file = open("proc-these-files","w")
proc_file.write("# Files to be planted and searched\n")
proc_file.write("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout=None
filename=MOPfits.adGet(str(expnum)+"p",extno=int(ccd),cutout=cutout)
print filename
if not os.access(filename,os.R_OK):
return(-3)
filename=os.path.splitext(filename)
filenames.append(filename[0])
proc_file.write("%s %f %s \n" % ( filename[0], fwhm/0.183, "no"))
proc_file.flush()
proc_file.close()
command="find.pl -p '' -d ./ "
sys.stderr.write(command)
try:
os.system(command)
except:
sys.stderr.write("Failed while running find")
file_extens=[
"cands.comb",
"measure3.cands.astrom",
"measure3.WARNING",
"measure3.astrom.scatter"]
if os.access("find.OK",os.R_OK):
os.system("touch /home/cadc/kavelaar/results/05AQ06B/"+filenames[0]+".OK")
else:
os.system("touch /home/cadc/kavelaar/results/05AQ06B/"+filenames[0]+".FAILED")
### look for the cand.comb file and store in the DB
import shutil
for ext in file_extens:
if os.access(filenames[0]+"."+ext,os.R_OK):
shutil.copy(filenames[0]+"."+ext,"/home/cadc/kavelaar/results/05AQ06B")
astrom=filenames[0]+".measure3.cands.astrom"
print astrom
cmd = "mpc_gen.pl -c "+astrom
print os.access(astrom,os.R_OK)
if os.access(astrom,os.R_OK):
print cmd
os.system(cmd)
os.system("mpcIngest.pl *.MPC")
os.system("cp *.MPC /home/cadc/kavelaar/results/05AQ06B")
return(1)
return(0) | [
"def",
"searchTriples",
"(",
"expnums",
",",
"ccd",
")",
":",
"import",
"MOPfits",
",",
"os",
"import",
"MOPdbaccess",
"if",
"len",
"(",
"expnums",
")",
"!=",
"3",
":",
"return",
"(",
"-",
"1",
")",
"mysql",
"=",
"MOPdbaccess",
".",
"connect",
"(",
"... | Given a list of exposure numbers, find all the KBOs in that set of exposures | [
"Given",
"a",
"list",
"of",
"exposure",
"numbers",
"find",
"all",
"the",
"KBOs",
"in",
"that",
"set",
"of",
"exposures"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/s1.py#L17-L97 |
OSSOS/MOP | src/ossos/core/ossos/gui/errorhandling.py | DownloadErrorHandler.handle_error | def handle_error(self, error, download_request):
"""
Checks what error occured and looks for an appropriate solution.
Args:
error: Exception
The error that has occured.
download_request:
The request which resulted in the error.
"""
if hasattr(error, "errno") and error.errno == errno.EACCES:
self.handle_certificate_problem(str(error))
else:
self.handle_general_download_error(str(error), download_request) | python | def handle_error(self, error, download_request):
"""
Checks what error occured and looks for an appropriate solution.
Args:
error: Exception
The error that has occured.
download_request:
The request which resulted in the error.
"""
if hasattr(error, "errno") and error.errno == errno.EACCES:
self.handle_certificate_problem(str(error))
else:
self.handle_general_download_error(str(error), download_request) | [
"def",
"handle_error",
"(",
"self",
",",
"error",
",",
"download_request",
")",
":",
"if",
"hasattr",
"(",
"error",
",",
"\"errno\"",
")",
"and",
"error",
".",
"errno",
"==",
"errno",
".",
"EACCES",
":",
"self",
".",
"handle_certificate_problem",
"(",
"str... | Checks what error occured and looks for an appropriate solution.
Args:
error: Exception
The error that has occured.
download_request:
The request which resulted in the error. | [
"Checks",
"what",
"error",
"occured",
"and",
"looks",
"for",
"an",
"appropriate",
"solution",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/errorhandling.py#L19-L32 |
OSSOS/MOP | src/ossos/core/ossos/gui/models/validation.py | ValidationModel.get_current_observation_date | def get_current_observation_date(self):
"""
Get the date of the current observation by looking in the header
of the observation for the DATE and EXPTIME keywords.
The 'DATE AT MIDDLE OF OBSERVATION' of the observation is returned
@return: Time
"""
# All HDU elements have the same date and time so just use
# last one, sometimes the first one is missing the header, in MEF
header = self.get_current_cutout().hdulist[-1].header
mjd_obs = float(header.get('MJD-OBS'))
exptime = float(header.get('EXPTIME'))
mpc_date = Time(mjd_obs,
format='mjd',
scale='utc',
precision=config.read('MPC.DATE_PRECISION'))
mpc_date += TimeDelta(exptime * units.second) / 2.0
mpc_date = mpc_date.mpc
return mpc_date | python | def get_current_observation_date(self):
"""
Get the date of the current observation by looking in the header
of the observation for the DATE and EXPTIME keywords.
The 'DATE AT MIDDLE OF OBSERVATION' of the observation is returned
@return: Time
"""
# All HDU elements have the same date and time so just use
# last one, sometimes the first one is missing the header, in MEF
header = self.get_current_cutout().hdulist[-1].header
mjd_obs = float(header.get('MJD-OBS'))
exptime = float(header.get('EXPTIME'))
mpc_date = Time(mjd_obs,
format='mjd',
scale='utc',
precision=config.read('MPC.DATE_PRECISION'))
mpc_date += TimeDelta(exptime * units.second) / 2.0
mpc_date = mpc_date.mpc
return mpc_date | [
"def",
"get_current_observation_date",
"(",
"self",
")",
":",
"# All HDU elements have the same date and time so just use",
"# last one, sometimes the first one is missing the header, in MEF",
"header",
"=",
"self",
".",
"get_current_cutout",
"(",
")",
".",
"hdulist",
"[",
"-",
... | Get the date of the current observation by looking in the header
of the observation for the DATE and EXPTIME keywords.
The 'DATE AT MIDDLE OF OBSERVATION' of the observation is returned
@return: Time | [
"Get",
"the",
"date",
"of",
"the",
"current",
"observation",
"by",
"looking",
"in",
"the",
"header",
"of",
"the",
"observation",
"for",
"the",
"DATE",
"and",
"EXPTIME",
"keywords",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/models/validation.py#L222-L241 |
OSSOS/MOP | src/ossos/utils/effunction.py | square | def square(m, eff_max,c,m0,sigma,m1=21):
"""
eff_max: Maximum of the efficiency function (peak efficiency)
c: shape of the drop-off in the efficiency at bright end
m0: transition to tappering at faint magnitudes
sigma: width of the transition in efficeincy
m1: magnitude at which peak efficeincy occurs
square(m) = (eff_max-c*(m-21)**2)/(1+exp((m-M_0)/sig))
"""
return (eff_max-c*(m-21)**2)/(1+numpy.exp((m-m0)/sigma)) | python | def square(m, eff_max,c,m0,sigma,m1=21):
"""
eff_max: Maximum of the efficiency function (peak efficiency)
c: shape of the drop-off in the efficiency at bright end
m0: transition to tappering at faint magnitudes
sigma: width of the transition in efficeincy
m1: magnitude at which peak efficeincy occurs
square(m) = (eff_max-c*(m-21)**2)/(1+exp((m-M_0)/sig))
"""
return (eff_max-c*(m-21)**2)/(1+numpy.exp((m-m0)/sigma)) | [
"def",
"square",
"(",
"m",
",",
"eff_max",
",",
"c",
",",
"m0",
",",
"sigma",
",",
"m1",
"=",
"21",
")",
":",
"return",
"(",
"eff_max",
"-",
"c",
"*",
"(",
"m",
"-",
"21",
")",
"**",
"2",
")",
"/",
"(",
"1",
"+",
"numpy",
".",
"exp",
"(",... | eff_max: Maximum of the efficiency function (peak efficiency)
c: shape of the drop-off in the efficiency at bright end
m0: transition to tappering at faint magnitudes
sigma: width of the transition in efficeincy
m1: magnitude at which peak efficeincy occurs
square(m) = (eff_max-c*(m-21)**2)/(1+exp((m-M_0)/sig)) | [
"eff_max",
":",
"Maximum",
"of",
"the",
"efficiency",
"function",
"(",
"peak",
"efficiency",
")",
"c",
":",
"shape",
"of",
"the",
"drop",
"-",
"off",
"in",
"the",
"efficiency",
"at",
"bright",
"end",
"m0",
":",
"transition",
"to",
"tappering",
"at",
"fai... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/effunction.py#L6-L17 |
OSSOS/MOP | src/ossos/utils/effunction.py | parse_square_param | def parse_square_param(line):
"""
Parse the line from the .eff file that contains the efficiency function
parameters for a 'square' function
line : the line containt the parameters, must start with 'square_param'
"""
if not line.startswith("square_param="):
raise ValueError("Not a valid square_param line")
values = line.split()
params = {'sigma': float(values.pop()),
'm0': float(values.pop()),
'c': float(values.pop()),
'eff_max': float(values.pop())
}
return params | python | def parse_square_param(line):
"""
Parse the line from the .eff file that contains the efficiency function
parameters for a 'square' function
line : the line containt the parameters, must start with 'square_param'
"""
if not line.startswith("square_param="):
raise ValueError("Not a valid square_param line")
values = line.split()
params = {'sigma': float(values.pop()),
'm0': float(values.pop()),
'c': float(values.pop()),
'eff_max': float(values.pop())
}
return params | [
"def",
"parse_square_param",
"(",
"line",
")",
":",
"if",
"not",
"line",
".",
"startswith",
"(",
"\"square_param=\"",
")",
":",
"raise",
"ValueError",
"(",
"\"Not a valid square_param line\"",
")",
"values",
"=",
"line",
".",
"split",
"(",
")",
"params",
"=",
... | Parse the line from the .eff file that contains the efficiency function
parameters for a 'square' function
line : the line containt the parameters, must start with 'square_param' | [
"Parse",
"the",
"line",
"from",
"the",
".",
"eff",
"file",
"that",
"contains",
"the",
"efficiency",
"function",
"parameters",
"for",
"a",
"square",
"function",
"line",
":",
"the",
"line",
"containt",
"the",
"parameters",
"must",
"start",
"with",
"square_param"... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/effunction.py#L19-L33 |
OSSOS/MOP | src/ossos/utils/effunction.py | parse_eff | def parse_eff(filename):
"""
Parse through Jean-Marcs OSSSO .eff files.
The efficiency files comes in 'chunks' meant to be used at different 'rates' of motion.
"""
blocks = []
block = {}
with open(filename) as efile:
for line in efile.readlines():
if line.lstrip().startswith("#"):
continue
keyword = line.lstrip().split("=")[0]
funcs = {'square_param': parse_square_param,
'rates': rates}
block[keyword] = funcs.get(keyword, dummy)(line)
if keyword == 'mag_lim':
blocks.append(block)
block = {}
return blocks | python | def parse_eff(filename):
"""
Parse through Jean-Marcs OSSSO .eff files.
The efficiency files comes in 'chunks' meant to be used at different 'rates' of motion.
"""
blocks = []
block = {}
with open(filename) as efile:
for line in efile.readlines():
if line.lstrip().startswith("#"):
continue
keyword = line.lstrip().split("=")[0]
funcs = {'square_param': parse_square_param,
'rates': rates}
block[keyword] = funcs.get(keyword, dummy)(line)
if keyword == 'mag_lim':
blocks.append(block)
block = {}
return blocks | [
"def",
"parse_eff",
"(",
"filename",
")",
":",
"blocks",
"=",
"[",
"]",
"block",
"=",
"{",
"}",
"with",
"open",
"(",
"filename",
")",
"as",
"efile",
":",
"for",
"line",
"in",
"efile",
".",
"readlines",
"(",
")",
":",
"if",
"line",
".",
"lstrip",
... | Parse through Jean-Marcs OSSSO .eff files.
The efficiency files comes in 'chunks' meant to be used at different 'rates' of motion. | [
"Parse",
"through",
"Jean",
"-",
"Marcs",
"OSSSO",
".",
"eff",
"files",
".",
"The",
"efficiency",
"files",
"comes",
"in",
"chunks",
"meant",
"to",
"be",
"used",
"at",
"different",
"rates",
"of",
"motion",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/effunction.py#L57-L79 |
OSSOS/MOP | src/ossos/core/ossos/gui/config.py | read | def read(keypath, configfile=None):
"""
Reads a value from the configuration file.
Args:
keypath: str
Specifies the key for which the value is desired. It can be a
hierarchical path. Example: "section1.subsection.key1"
configfile: str
Path to the config file to read. Defaults to None, in which case
the application's default config file is used.
Returns:
value from configuration file
"""
if configfile in _configs:
appconfig = _configs[configfile]
else:
appconfig = AppConfig(configfile=configfile)
_configs[configfile] = appconfig
return appconfig.read(keypath) | python | def read(keypath, configfile=None):
"""
Reads a value from the configuration file.
Args:
keypath: str
Specifies the key for which the value is desired. It can be a
hierarchical path. Example: "section1.subsection.key1"
configfile: str
Path to the config file to read. Defaults to None, in which case
the application's default config file is used.
Returns:
value from configuration file
"""
if configfile in _configs:
appconfig = _configs[configfile]
else:
appconfig = AppConfig(configfile=configfile)
_configs[configfile] = appconfig
return appconfig.read(keypath) | [
"def",
"read",
"(",
"keypath",
",",
"configfile",
"=",
"None",
")",
":",
"if",
"configfile",
"in",
"_configs",
":",
"appconfig",
"=",
"_configs",
"[",
"configfile",
"]",
"else",
":",
"appconfig",
"=",
"AppConfig",
"(",
"configfile",
"=",
"configfile",
")",... | Reads a value from the configuration file.
Args:
keypath: str
Specifies the key for which the value is desired. It can be a
hierarchical path. Example: "section1.subsection.key1"
configfile: str
Path to the config file to read. Defaults to None, in which case
the application's default config file is used.
Returns:
value from configuration file | [
"Reads",
"a",
"value",
"from",
"the",
"configuration",
"file",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/gui/config.py#L10-L31 |
OSSOS/MOP | src/ossos/core/ossos/ephem_target.py | EphemTarget._cdata_header | def _cdata_header(self, colsep="|"):
"""
Create a header for the CDATA section, as a visual guide.
"""
fields = self.fields
header_lines = []
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
line += self._entry(fieldName, width, colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
line += self._entry(fields[fieldName]['attr']['format'], width=width, colsep=colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
(l, m) = divmod(width, 10)
guide = ""
for i in range(l):
guide += "".join(map(str, range(10)))
guide += "".join(map(str, range(m)))
line += self._entry(guide, width=width, colsep=colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
guide = "-" * width
line += self._entry(guide, width=width, colsep=colsep)
header_lines.append(line)
return header_lines | python | def _cdata_header(self, colsep="|"):
"""
Create a header for the CDATA section, as a visual guide.
"""
fields = self.fields
header_lines = []
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
line += self._entry(fieldName, width, colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
line += self._entry(fields[fieldName]['attr']['format'], width=width, colsep=colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
(l, m) = divmod(width, 10)
guide = ""
for i in range(l):
guide += "".join(map(str, range(10)))
guide += "".join(map(str, range(m)))
line += self._entry(guide, width=width, colsep=colsep)
header_lines.append(line)
line = ""
for fieldName in self.field_names:
width = int(fields[fieldName]['attr']['width'])
guide = "-" * width
line += self._entry(guide, width=width, colsep=colsep)
header_lines.append(line)
return header_lines | [
"def",
"_cdata_header",
"(",
"self",
",",
"colsep",
"=",
"\"|\"",
")",
":",
"fields",
"=",
"self",
".",
"fields",
"header_lines",
"=",
"[",
"]",
"line",
"=",
"\"\"",
"for",
"fieldName",
"in",
"self",
".",
"field_names",
":",
"width",
"=",
"int",
"(",
... | Create a header for the CDATA section, as a visual guide. | [
"Create",
"a",
"header",
"for",
"the",
"CDATA",
"section",
"as",
"a",
"visual",
"guide",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ephem_target.py#L103-L139 |
OSSOS/MOP | src/ossos/core/ossos/ephem_target.py | EphemTarget._append_cdata | def _append_cdata(self, coordinate):
"""
Append an target location to the ephemeris listing.
"""
fields = self.fields
sra = coordinate.ra.to_string(units.hour, sep=':', precision=2, pad=True)
sdec = coordinate.dec.to_string(units.degree, sep=':', precision=1, alwayssign=True)
coord = SkyCoord(sra + " " + sdec, unit=(units.hour, units.degree))
sra = coord.ra.to_string(units.hour, sep=":", precision=2, pad=True)
sdec = coord.dec.to_string(units.degree, sep=":", precision=1, pad=True, alwayssign=True)
sdate = str(coordinate.obstime.replicate(format('iso')))
self.cdata.appendData(self._entry(sdate, fields["DATE_UTC"]['attr']['width'], colsep=self.column_separator))
self.cdata.appendData(self._entry(sra, fields["RA_J2000"]['attr']['width'], colsep=self.column_separator))
self.cdata.appendData(self._entry(sdec, fields["DEC_J2000"]["attr"]["width"], colsep=self.column_separator))
self.cdata.appendData("\n") | python | def _append_cdata(self, coordinate):
"""
Append an target location to the ephemeris listing.
"""
fields = self.fields
sra = coordinate.ra.to_string(units.hour, sep=':', precision=2, pad=True)
sdec = coordinate.dec.to_string(units.degree, sep=':', precision=1, alwayssign=True)
coord = SkyCoord(sra + " " + sdec, unit=(units.hour, units.degree))
sra = coord.ra.to_string(units.hour, sep=":", precision=2, pad=True)
sdec = coord.dec.to_string(units.degree, sep=":", precision=1, pad=True, alwayssign=True)
sdate = str(coordinate.obstime.replicate(format('iso')))
self.cdata.appendData(self._entry(sdate, fields["DATE_UTC"]['attr']['width'], colsep=self.column_separator))
self.cdata.appendData(self._entry(sra, fields["RA_J2000"]['attr']['width'], colsep=self.column_separator))
self.cdata.appendData(self._entry(sdec, fields["DEC_J2000"]["attr"]["width"], colsep=self.column_separator))
self.cdata.appendData("\n") | [
"def",
"_append_cdata",
"(",
"self",
",",
"coordinate",
")",
":",
"fields",
"=",
"self",
".",
"fields",
"sra",
"=",
"coordinate",
".",
"ra",
".",
"to_string",
"(",
"units",
".",
"hour",
",",
"sep",
"=",
"':'",
",",
"precision",
"=",
"2",
",",
"pad",
... | Append an target location to the ephemeris listing. | [
"Append",
"an",
"target",
"location",
"to",
"the",
"ephemeris",
"listing",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ephem_target.py#L144-L158 |
OSSOS/MOP | src/ossos/core/ossos/ephem_target.py | EphemTarget.gemini_writer | def gemini_writer(self, f_handle):
"""
Write out a GEMINI formated OT ephemeris. This is just a hack of SSD Horizons output.
"""
f_handle.write(GEMINI_HEADER)
# Date__(UT)__HR:MN Date_________JDUT R.A.___(ICRF/J2000.0)___DEC dRA*cosD d(DEC)/dt
# 1 2 3 4 5 6 7 8 9
# 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890
# ' 2019-Jan-30 00:00 01 46 56.46 +10 28 54.9 01 47 56.17 +10 34 27.6 3.520
for coordinate in self.coordinates:
date = coordinate.obstime.datetime.strftime('%Y-%b-%d %H:%M')[:17]
f_handle.write(" {:16} {:17.9f} {:27} {:+8.5f} {:+8.5f}\n".format(date,
coordinate.obstime.jd,
coordinate.to_string('hmsdms',
sep=' ',
precision=4,
pad=True)[:27],
float(coordinate.dra),
float(coordinate.ddec)),
)
f_handle.write(GEMINI_FOOTER)
return | python | def gemini_writer(self, f_handle):
"""
Write out a GEMINI formated OT ephemeris. This is just a hack of SSD Horizons output.
"""
f_handle.write(GEMINI_HEADER)
# Date__(UT)__HR:MN Date_________JDUT R.A.___(ICRF/J2000.0)___DEC dRA*cosD d(DEC)/dt
# 1 2 3 4 5 6 7 8 9
# 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890
# ' 2019-Jan-30 00:00 01 46 56.46 +10 28 54.9 01 47 56.17 +10 34 27.6 3.520
for coordinate in self.coordinates:
date = coordinate.obstime.datetime.strftime('%Y-%b-%d %H:%M')[:17]
f_handle.write(" {:16} {:17.9f} {:27} {:+8.5f} {:+8.5f}\n".format(date,
coordinate.obstime.jd,
coordinate.to_string('hmsdms',
sep=' ',
precision=4,
pad=True)[:27],
float(coordinate.dra),
float(coordinate.ddec)),
)
f_handle.write(GEMINI_FOOTER)
return | [
"def",
"gemini_writer",
"(",
"self",
",",
"f_handle",
")",
":",
"f_handle",
".",
"write",
"(",
"GEMINI_HEADER",
")",
"# Date__(UT)__HR:MN Date_________JDUT R.A.___(ICRF/J2000.0)___DEC dRA*cosD d(DEC)/dt",
"# 1 2 3 4 5 6 7 ... | Write out a GEMINI formated OT ephemeris. This is just a hack of SSD Horizons output. | [
"Write",
"out",
"a",
"GEMINI",
"formated",
"OT",
"ephemeris",
".",
"This",
"is",
"just",
"a",
"hack",
"of",
"SSD",
"Horizons",
"output",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/ephem_target.py#L175-L197 |
OSSOS/MOP | src/ossos/utils/proc_status.py | mkpsf_failures | def mkpsf_failures():
"""A simple script to loop over the standard tags for the mkpsf and
step1 processing steps. If exposure/ccd combo isn't marked as
'success' then report the failure.
This example uses the vos client directly.
"""
for expnum in storage.list_dbimages():
for ccd in range(36):
if not storage.get_status(MKPSF, "", expnum, "p", ccd):
# get_status returns FALSE if process didn't succeed,
# with return_message=True it returns the error message.
print expnum, ccd, storage.get_status(MKPSF, "", expnum, "p", ccd, return_message=True) | python | def mkpsf_failures():
"""A simple script to loop over the standard tags for the mkpsf and
step1 processing steps. If exposure/ccd combo isn't marked as
'success' then report the failure.
This example uses the vos client directly.
"""
for expnum in storage.list_dbimages():
for ccd in range(36):
if not storage.get_status(MKPSF, "", expnum, "p", ccd):
# get_status returns FALSE if process didn't succeed,
# with return_message=True it returns the error message.
print expnum, ccd, storage.get_status(MKPSF, "", expnum, "p", ccd, return_message=True) | [
"def",
"mkpsf_failures",
"(",
")",
":",
"for",
"expnum",
"in",
"storage",
".",
"list_dbimages",
"(",
")",
":",
"for",
"ccd",
"in",
"range",
"(",
"36",
")",
":",
"if",
"not",
"storage",
".",
"get_status",
"(",
"MKPSF",
",",
"\"\"",
",",
"expnum",
",",... | A simple script to loop over the standard tags for the mkpsf and
step1 processing steps. If exposure/ccd combo isn't marked as
'success' then report the failure.
This example uses the vos client directly. | [
"A",
"simple",
"script",
"to",
"loop",
"over",
"the",
"standard",
"tags",
"for",
"the",
"mkpsf",
"and",
"step1",
"processing",
"steps",
".",
"If",
"exposure",
"/",
"ccd",
"combo",
"isn",
"t",
"marked",
"as",
"success",
"then",
"report",
"the",
"failure",
... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/utils/proc_status.py#L7-L20 |
OSSOS/MOP | src/jjk/preproc/search2.py | searchTriples | def searchTriples(expnums,ccd,plant=False):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
raise TaskError, "got %d exposures"%(len(expnums))
### Some program Constants
proc_these_files=[]
if not plant:
proc_these_files.append("# Files to be planted and searched\n")
proc_these_files.append("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
### Get the processed images from AD
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout="[*,*]"
filename=MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)
if not os.access(filename,os.R_OK):
sys.stderr.write("Ad Get Failed\n")
raise TaskError, 'adGet Failed'
if opt.none:
continue
filename=os.path.splitext(filename)
filenames.append(filename[0])
try:
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
except:
raise TaskError, "mysql failed"
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
mysql.close()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if not plant:
#proc_these_files.append("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no'))
pstr='NO'
else:
pstr='YES'
### since we're planting we need a psf. JMPMAKEPSF will
### update the proc-these-files listing
### run the make psf script .. always. This creates proc-these-files
### which is needed by the find.pl script.
command='jmpmakepsf.csh ./ %s %s' % ( filename[0], pstr )
if opt.verbose:
sys.stderr.write( command )
try:
os.system(command)
except:
raise TaskError, "jmpmakepsf noexec"
if os.access(filename[0]+'.jmpmakepsf.FAILED',os.R_OK) or not os.access(filename[0]+".psf.fits", os.R_OK) :
# if plant:
# raise TaskError, "jmpmakepsf failed"
# do without plant
if 1==1 :
plant=False
pstr='NO'
### we're not planting so, lets keep going
### but check that there is a line in proc_these_files
add_line=True
if not os.access('proc-these-files',os.R_OK):
f=open('proc-these-files','w')
for l in proc_these_files:
f.write(l)
f.close()
f=open('proc-these-files','r')
ptf_lines=f.readlines()
f.close()
for ptf_line in ptf_lines:
if ptf_line[0]=='#':
continue
ptf_a=ptf_line.split()
import re
if re.search('%s' % (filename[0]),ptf_a[0]):
### there's already a line for this one
add_line=False
break
if add_line:
f=open('proc-these-files','a')
f.write("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no'))
f.close()
if opt.none:
return(-1)
prefix=''
if plant:
command="plant.csh ./ -rmin %s -rmax %s -ang %s -width %s " % ( opt.rmin, opt.rmax, opt.angle, opt.width)
try:
os.system(command)
except:
raise TaskError, 'plant exec. failed'
if not os.access('plant.OK',os.R_OK):
raise TaskError, 'plant failed'
prefix='fk'
#else:
# f=open('proc-these-files','w')
# for line in proc_these_files:
# f.write(line)
# f.flush()
# f.close()
if opt.rerun and os.access('find.OK',os.R_OK):
os.unlink("find.OK")
command="find.pl -p "+prefix+" -rn %s -rx %s -a %s -aw %s -d ./ " % ( opt.rmin, opt.rmax, opt.angle, opt.width)
#command="find.pl -p "+prefix+" -d ./ "
if opt.verbose:
sys.stderr.write( command )
try:
os.system(command)
except:
raise TaskErorr, "execute find"
if not os.access("find.OK",os.R_OK):
raise TaskError, "find failed"
### check the transformation file
command = "checktrans -p "+prefix
try:
os.system(command)
except:
raise TaskError, "execute checktrans"
if not os.access("checktrans.OK",os.R_OK):
raise TaskError, "checktrans failed"
if os.access("BAD_TRANS"+prefix,os.R_OK):
raise TaskError,"BAD TRANS"
astrom=prefix+filenames[0]+".cands.comb"
if opt.plant:
astrom=prefix+filenames[0]+".comb.found"
try:
#make sure we have +10 lines in this file
lines=file(astrom).readlines()
if len(lines)<10:
raise TaskError,"Too few Found"
except:
raise TaskError, "Error reading %s" %(astrom)
if os.access(astrom,os.R_OK):
return(1)
else:
return(0) | python | def searchTriples(expnums,ccd,plant=False):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
raise TaskError, "got %d exposures"%(len(expnums))
### Some program Constants
proc_these_files=[]
if not plant:
proc_these_files.append("# Files to be planted and searched\n")
proc_these_files.append("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
### Get the processed images from AD
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout="[*,*]"
filename=MOPfits.adGet(str(expnum)+opt.raw,extno=int(ccd),cutout=cutout)
if not os.access(filename,os.R_OK):
sys.stderr.write("Ad Get Failed\n")
raise TaskError, 'adGet Failed'
if opt.none:
continue
filename=os.path.splitext(filename)
filenames.append(filename[0])
try:
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
except:
raise TaskError, "mysql failed"
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
mysql.close()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if not plant:
#proc_these_files.append("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no'))
pstr='NO'
else:
pstr='YES'
### since we're planting we need a psf. JMPMAKEPSF will
### update the proc-these-files listing
### run the make psf script .. always. This creates proc-these-files
### which is needed by the find.pl script.
command='jmpmakepsf.csh ./ %s %s' % ( filename[0], pstr )
if opt.verbose:
sys.stderr.write( command )
try:
os.system(command)
except:
raise TaskError, "jmpmakepsf noexec"
if os.access(filename[0]+'.jmpmakepsf.FAILED',os.R_OK) or not os.access(filename[0]+".psf.fits", os.R_OK) :
# if plant:
# raise TaskError, "jmpmakepsf failed"
# do without plant
if 1==1 :
plant=False
pstr='NO'
### we're not planting so, lets keep going
### but check that there is a line in proc_these_files
add_line=True
if not os.access('proc-these-files',os.R_OK):
f=open('proc-these-files','w')
for l in proc_these_files:
f.write(l)
f.close()
f=open('proc-these-files','r')
ptf_lines=f.readlines()
f.close()
for ptf_line in ptf_lines:
if ptf_line[0]=='#':
continue
ptf_a=ptf_line.split()
import re
if re.search('%s' % (filename[0]),ptf_a[0]):
### there's already a line for this one
add_line=False
break
if add_line:
f=open('proc-these-files','a')
f.write("%s %f %s \n" % ( filename[0], fwhm/0.183, 'no'))
f.close()
if opt.none:
return(-1)
prefix=''
if plant:
command="plant.csh ./ -rmin %s -rmax %s -ang %s -width %s " % ( opt.rmin, opt.rmax, opt.angle, opt.width)
try:
os.system(command)
except:
raise TaskError, 'plant exec. failed'
if not os.access('plant.OK',os.R_OK):
raise TaskError, 'plant failed'
prefix='fk'
#else:
# f=open('proc-these-files','w')
# for line in proc_these_files:
# f.write(line)
# f.flush()
# f.close()
if opt.rerun and os.access('find.OK',os.R_OK):
os.unlink("find.OK")
command="find.pl -p "+prefix+" -rn %s -rx %s -a %s -aw %s -d ./ " % ( opt.rmin, opt.rmax, opt.angle, opt.width)
#command="find.pl -p "+prefix+" -d ./ "
if opt.verbose:
sys.stderr.write( command )
try:
os.system(command)
except:
raise TaskErorr, "execute find"
if not os.access("find.OK",os.R_OK):
raise TaskError, "find failed"
### check the transformation file
command = "checktrans -p "+prefix
try:
os.system(command)
except:
raise TaskError, "execute checktrans"
if not os.access("checktrans.OK",os.R_OK):
raise TaskError, "checktrans failed"
if os.access("BAD_TRANS"+prefix,os.R_OK):
raise TaskError,"BAD TRANS"
astrom=prefix+filenames[0]+".cands.comb"
if opt.plant:
astrom=prefix+filenames[0]+".comb.found"
try:
#make sure we have +10 lines in this file
lines=file(astrom).readlines()
if len(lines)<10:
raise TaskError,"Too few Found"
except:
raise TaskError, "Error reading %s" %(astrom)
if os.access(astrom,os.R_OK):
return(1)
else:
return(0) | [
"def",
"searchTriples",
"(",
"expnums",
",",
"ccd",
",",
"plant",
"=",
"False",
")",
":",
"import",
"MOPfits",
",",
"os",
"import",
"MOPdbaccess",
"if",
"len",
"(",
"expnums",
")",
"!=",
"3",
":",
"raise",
"TaskError",
",",
"\"got %d exposures\"",
"%",
"... | Given a list of exposure numbers, find all the KBOs in that set of exposures | [
"Given",
"a",
"list",
"of",
"exposure",
"numbers",
"find",
"all",
"the",
"KBOs",
"in",
"that",
"set",
"of",
"exposures"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/search2.py#L22-L185 |
OSSOS/MOP | src/jjk/preproc/search2.py | get_nailing | def get_nailing(expnum,ccd):
"""Get the 'nailing' images associated with expnum"""
sql="""
SELECT e.expnum, (e.mjdate - f.mjdate) dt
FROM bucket.exposure e
JOIN bucket.exposure f
JOIN bucket.association b ON b.expnum=f.expnum
JOIN bucket.association a ON a.pointing=b.pointing AND a.expnum=e.expnum
WHERE f.expnum=%d
AND abs(e.mjdate - f.mjdate) > 0.5
AND abs(e.mjdate - f.mjdate) < 15.0
ORDER BY abs(e.mjdate-f.mjdate)
""" % ( expnum )
try:
import MOPdbaccess
mysql=MOPdbaccess.connect('bucket','cfhls',dbSystem='MYSQL')
bucket=mysql.cursor()
bucket.execute(sql)
nailings = bucket.fetchall()
mysql.close()
if int(ccd) < 18:
cutout="[-*,-*]"
else:
cutout=None
import MOPfits
for nailing in nailings:
filename=MOPfits.adGet(str(nailing[0])+opt.raw,extno=int(ccd),cutout=cutout)
except:
raise TaskError, "get nailing failed" | python | def get_nailing(expnum,ccd):
"""Get the 'nailing' images associated with expnum"""
sql="""
SELECT e.expnum, (e.mjdate - f.mjdate) dt
FROM bucket.exposure e
JOIN bucket.exposure f
JOIN bucket.association b ON b.expnum=f.expnum
JOIN bucket.association a ON a.pointing=b.pointing AND a.expnum=e.expnum
WHERE f.expnum=%d
AND abs(e.mjdate - f.mjdate) > 0.5
AND abs(e.mjdate - f.mjdate) < 15.0
ORDER BY abs(e.mjdate-f.mjdate)
""" % ( expnum )
try:
import MOPdbaccess
mysql=MOPdbaccess.connect('bucket','cfhls',dbSystem='MYSQL')
bucket=mysql.cursor()
bucket.execute(sql)
nailings = bucket.fetchall()
mysql.close()
if int(ccd) < 18:
cutout="[-*,-*]"
else:
cutout=None
import MOPfits
for nailing in nailings:
filename=MOPfits.adGet(str(nailing[0])+opt.raw,extno=int(ccd),cutout=cutout)
except:
raise TaskError, "get nailing failed" | [
"def",
"get_nailing",
"(",
"expnum",
",",
"ccd",
")",
":",
"sql",
"=",
"\"\"\"\n SELECT e.expnum, (e.mjdate - f.mjdate) dt\n FROM bucket.exposure e\n JOIN bucket.exposure f\n JOIN bucket.association b ON b.expnum=f.expnum\n JOIN bucket.association a ON a.pointing=b.pointing AND a.... | Get the 'nailing' images associated with expnum | [
"Get",
"the",
"nailing",
"images",
"associated",
"with",
"expnum"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/search2.py#L188-L216 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | FrequencyBand.intersect | def intersect(self, other):
"""
Return the intersection between this frequency band and another.
Args:
other (FrequencyBand): the instance to intersect with
Examples::
>>> import zounds
>>> b1 = zounds.FrequencyBand(500, 1000)
>>> b2 = zounds.FrequencyBand(900, 2000)
>>> intersection = b1.intersect(b2)
>>> intersection.start_hz, intersection.stop_hz
(900, 1000)
"""
lowest_stop = min(self.stop_hz, other.stop_hz)
highest_start = max(self.start_hz, other.start_hz)
return FrequencyBand(highest_start, lowest_stop) | python | def intersect(self, other):
"""
Return the intersection between this frequency band and another.
Args:
other (FrequencyBand): the instance to intersect with
Examples::
>>> import zounds
>>> b1 = zounds.FrequencyBand(500, 1000)
>>> b2 = zounds.FrequencyBand(900, 2000)
>>> intersection = b1.intersect(b2)
>>> intersection.start_hz, intersection.stop_hz
(900, 1000)
"""
lowest_stop = min(self.stop_hz, other.stop_hz)
highest_start = max(self.start_hz, other.start_hz)
return FrequencyBand(highest_start, lowest_stop) | [
"def",
"intersect",
"(",
"self",
",",
"other",
")",
":",
"lowest_stop",
"=",
"min",
"(",
"self",
".",
"stop_hz",
",",
"other",
".",
"stop_hz",
")",
"highest_start",
"=",
"max",
"(",
"self",
".",
"start_hz",
",",
"other",
".",
"start_hz",
")",
"return",... | Return the intersection between this frequency band and another.
Args:
other (FrequencyBand): the instance to intersect with
Examples::
>>> import zounds
>>> b1 = zounds.FrequencyBand(500, 1000)
>>> b2 = zounds.FrequencyBand(900, 2000)
>>> intersection = b1.intersect(b2)
>>> intersection.start_hz, intersection.stop_hz
(900, 1000) | [
"Return",
"the",
"intersection",
"between",
"this",
"frequency",
"band",
"and",
"another",
"."
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L66-L83 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | FrequencyScale.bands | def bands(self):
"""
An iterable of all bands in this scale
"""
if self._bands is None:
self._bands = self._compute_bands()
return self._bands | python | def bands(self):
"""
An iterable of all bands in this scale
"""
if self._bands is None:
self._bands = self._compute_bands()
return self._bands | [
"def",
"bands",
"(",
"self",
")",
":",
"if",
"self",
".",
"_bands",
"is",
"None",
":",
"self",
".",
"_bands",
"=",
"self",
".",
"_compute_bands",
"(",
")",
"return",
"self",
".",
"_bands"
] | An iterable of all bands in this scale | [
"An",
"iterable",
"of",
"all",
"bands",
"in",
"this",
"scale"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L167-L173 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | FrequencyScale.ensure_overlap_ratio | def ensure_overlap_ratio(self, required_ratio=0.5):
"""
Ensure that every adjacent pair of frequency bands meets the overlap
ratio criteria. This can be helpful in scenarios where a scale is
being used in an invertible transform, and something like the `constant
overlap add constraint
<https://ccrma.stanford.edu/~jos/sasp/Constant_Overlap_Add_COLA_Cases.html>`_
must be met in order to not introduce artifacts in the reconstruction.
Args:
required_ratio (float): The required overlap ratio between all
adjacent frequency band pairs
Raises:
AssertionError: when the overlap ratio for one or more adjacent
frequency band pairs is not met
"""
msg = \
'band {i}: ratio must be at least {required_ratio} but was {ratio}'
for i in range(0, len(self) - 1):
b1 = self[i]
b2 = self[i + 1]
try:
ratio = b1.intersection_ratio(b2)
except ValueError:
ratio = 0
if ratio < required_ratio:
raise AssertionError(msg.format(**locals())) | python | def ensure_overlap_ratio(self, required_ratio=0.5):
"""
Ensure that every adjacent pair of frequency bands meets the overlap
ratio criteria. This can be helpful in scenarios where a scale is
being used in an invertible transform, and something like the `constant
overlap add constraint
<https://ccrma.stanford.edu/~jos/sasp/Constant_Overlap_Add_COLA_Cases.html>`_
must be met in order to not introduce artifacts in the reconstruction.
Args:
required_ratio (float): The required overlap ratio between all
adjacent frequency band pairs
Raises:
AssertionError: when the overlap ratio for one or more adjacent
frequency band pairs is not met
"""
msg = \
'band {i}: ratio must be at least {required_ratio} but was {ratio}'
for i in range(0, len(self) - 1):
b1 = self[i]
b2 = self[i + 1]
try:
ratio = b1.intersection_ratio(b2)
except ValueError:
ratio = 0
if ratio < required_ratio:
raise AssertionError(msg.format(**locals())) | [
"def",
"ensure_overlap_ratio",
"(",
"self",
",",
"required_ratio",
"=",
"0.5",
")",
":",
"msg",
"=",
"'band {i}: ratio must be at least {required_ratio} but was {ratio}'",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"self",
")",
"-",
"1",
")",
":",
"... | Ensure that every adjacent pair of frequency bands meets the overlap
ratio criteria. This can be helpful in scenarios where a scale is
being used in an invertible transform, and something like the `constant
overlap add constraint
<https://ccrma.stanford.edu/~jos/sasp/Constant_Overlap_Add_COLA_Cases.html>`_
must be met in order to not introduce artifacts in the reconstruction.
Args:
required_ratio (float): The required overlap ratio between all
adjacent frequency band pairs
Raises:
AssertionError: when the overlap ratio for one or more adjacent
frequency band pairs is not met | [
"Ensure",
"that",
"every",
"adjacent",
"pair",
"of",
"frequency",
"bands",
"meets",
"the",
"overlap",
"ratio",
"criteria",
".",
"This",
"can",
"be",
"helpful",
"in",
"scenarios",
"where",
"a",
"scale",
"is",
"being",
"used",
"in",
"an",
"invertible",
"transf... | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L207-L238 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | FrequencyScale.Q | def Q(self):
"""
The quality factor of the scale, or, the ratio of center frequencies
to bandwidths
"""
return np.array(list(self.center_frequencies)) \
/ np.array(list(self.bandwidths)) | python | def Q(self):
"""
The quality factor of the scale, or, the ratio of center frequencies
to bandwidths
"""
return np.array(list(self.center_frequencies)) \
/ np.array(list(self.bandwidths)) | [
"def",
"Q",
"(",
"self",
")",
":",
"return",
"np",
".",
"array",
"(",
"list",
"(",
"self",
".",
"center_frequencies",
")",
")",
"/",
"np",
".",
"array",
"(",
"list",
"(",
"self",
".",
"bandwidths",
")",
")"
] | The quality factor of the scale, or, the ratio of center frequencies
to bandwidths | [
"The",
"quality",
"factor",
"of",
"the",
"scale",
"or",
"the",
"ratio",
"of",
"center",
"frequencies",
"to",
"bandwidths"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L241-L247 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | FrequencyScale.get_slice | def get_slice(self, frequency_band):
"""
Given a frequency band, and a frequency dimension comprised of
n_samples, return a slice using integer indices that may be used to
extract only the frequency samples that intersect with the frequency
band
"""
index = frequency_band
if isinstance(index, slice):
types = {
index.start.__class__,
index.stop.__class__,
index.step.__class__
}
if Hertz not in types:
return index
try:
start = Hertz(0) if index.start is None else index.start
if start < Hertz(0):
start = self.stop_hz + start
stop = self.stop_hz if index.stop is None else index.stop
if stop < Hertz(0):
stop = self.stop_hz + stop
frequency_band = FrequencyBand(start, stop)
except (ValueError, TypeError):
pass
start_index = bisect.bisect_left(
self.band_stops, frequency_band.start_hz)
stop_index = bisect.bisect_left(
self.band_starts, frequency_band.stop_hz)
if self.always_even and (stop_index - start_index) % 2:
# KLUDGE: This is simple, but it may make sense to choose move the
# upper *or* lower bound, based on which one introduces a lower
# error
stop_index += 1
return slice(start_index, stop_index) | python | def get_slice(self, frequency_band):
"""
Given a frequency band, and a frequency dimension comprised of
n_samples, return a slice using integer indices that may be used to
extract only the frequency samples that intersect with the frequency
band
"""
index = frequency_band
if isinstance(index, slice):
types = {
index.start.__class__,
index.stop.__class__,
index.step.__class__
}
if Hertz not in types:
return index
try:
start = Hertz(0) if index.start is None else index.start
if start < Hertz(0):
start = self.stop_hz + start
stop = self.stop_hz if index.stop is None else index.stop
if stop < Hertz(0):
stop = self.stop_hz + stop
frequency_band = FrequencyBand(start, stop)
except (ValueError, TypeError):
pass
start_index = bisect.bisect_left(
self.band_stops, frequency_band.start_hz)
stop_index = bisect.bisect_left(
self.band_starts, frequency_band.stop_hz)
if self.always_even and (stop_index - start_index) % 2:
# KLUDGE: This is simple, but it may make sense to choose move the
# upper *or* lower bound, based on which one introduces a lower
# error
stop_index += 1
return slice(start_index, stop_index) | [
"def",
"get_slice",
"(",
"self",
",",
"frequency_band",
")",
":",
"index",
"=",
"frequency_band",
"if",
"isinstance",
"(",
"index",
",",
"slice",
")",
":",
"types",
"=",
"{",
"index",
".",
"start",
".",
"__class__",
",",
"index",
".",
"stop",
".",
"__c... | Given a frequency band, and a frequency dimension comprised of
n_samples, return a slice using integer indices that may be used to
extract only the frequency samples that intersect with the frequency
band | [
"Given",
"a",
"frequency",
"band",
"and",
"a",
"frequency",
"dimension",
"comprised",
"of",
"n_samples",
"return",
"a",
"slice",
"using",
"integer",
"indices",
"that",
"may",
"be",
"used",
"to",
"extract",
"only",
"the",
"frequency",
"samples",
"that",
"inters... | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L289-L329 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | LinearScale.from_sample_rate | def from_sample_rate(sample_rate, n_bands, always_even=False):
"""
Return a :class:`~zounds.spectral.LinearScale` instance whose upper
frequency bound is informed by the nyquist frequency of the sample rate.
Args:
sample_rate (SamplingRate): the sample rate whose nyquist frequency
will serve as the upper frequency bound of this scale
n_bands (int): the number of evenly-spaced frequency bands
"""
fb = FrequencyBand(0, sample_rate.nyquist)
return LinearScale(fb, n_bands, always_even=always_even) | python | def from_sample_rate(sample_rate, n_bands, always_even=False):
"""
Return a :class:`~zounds.spectral.LinearScale` instance whose upper
frequency bound is informed by the nyquist frequency of the sample rate.
Args:
sample_rate (SamplingRate): the sample rate whose nyquist frequency
will serve as the upper frequency bound of this scale
n_bands (int): the number of evenly-spaced frequency bands
"""
fb = FrequencyBand(0, sample_rate.nyquist)
return LinearScale(fb, n_bands, always_even=always_even) | [
"def",
"from_sample_rate",
"(",
"sample_rate",
",",
"n_bands",
",",
"always_even",
"=",
"False",
")",
":",
"fb",
"=",
"FrequencyBand",
"(",
"0",
",",
"sample_rate",
".",
"nyquist",
")",
"return",
"LinearScale",
"(",
"fb",
",",
"n_bands",
",",
"always_even",
... | Return a :class:`~zounds.spectral.LinearScale` instance whose upper
frequency bound is informed by the nyquist frequency of the sample rate.
Args:
sample_rate (SamplingRate): the sample rate whose nyquist frequency
will serve as the upper frequency bound of this scale
n_bands (int): the number of evenly-spaced frequency bands | [
"Return",
"a",
":",
"class",
":",
"~zounds",
".",
"spectral",
".",
"LinearScale",
"instance",
"whose",
"upper",
"frequency",
"bound",
"is",
"informed",
"by",
"the",
"nyquist",
"frequency",
"of",
"the",
"sample",
"rate",
"."
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L387-L398 |
JohnVinyard/zounds | zounds/spectral/frequencyscale.py | ChromaScale._hz_to_semitones | def _hz_to_semitones(self, hz):
"""
Convert hertz into a number of semitones above or below some reference
value, in this case, A440
"""
return np.log(hz / self._a440) / np.log(self._a) | python | def _hz_to_semitones(self, hz):
"""
Convert hertz into a number of semitones above or below some reference
value, in this case, A440
"""
return np.log(hz / self._a440) / np.log(self._a) | [
"def",
"_hz_to_semitones",
"(",
"self",
",",
"hz",
")",
":",
"return",
"np",
".",
"log",
"(",
"hz",
"/",
"self",
".",
"_a440",
")",
"/",
"np",
".",
"log",
"(",
"self",
".",
"_a",
")"
] | Convert hertz into a number of semitones above or below some reference
value, in this case, A440 | [
"Convert",
"hertz",
"into",
"a",
"number",
"of",
"semitones",
"above",
"or",
"below",
"some",
"reference",
"value",
"in",
"this",
"case",
"A440"
] | train | https://github.com/JohnVinyard/zounds/blob/337b3f98753d09eaab1c72dcd37bb852a3fa5ac6/zounds/spectral/frequencyscale.py#L592-L597 |
OSSOS/MOP | src/jjk/preproc/cfhtCutout.py | resolve | def resolve(object):
"""Look up the name of a source using a resolver"""
import re
sesame_cmd = 'curl -s http://cdsweb.u-strasbg.fr/viz-bin/nph-sesame/-oI?'+string.replace(object,' ','')
f = os.popen(sesame_cmd)
lines = f.readlines()
f.close()
for line in lines:
if re.search('%J ', line):
result2 = line.split()
ra_deg = float(result2[1])
dec_deg = float(result2[2])
return (ra_deg, dec_deg)
return (0,0) | python | def resolve(object):
"""Look up the name of a source using a resolver"""
import re
sesame_cmd = 'curl -s http://cdsweb.u-strasbg.fr/viz-bin/nph-sesame/-oI?'+string.replace(object,' ','')
f = os.popen(sesame_cmd)
lines = f.readlines()
f.close()
for line in lines:
if re.search('%J ', line):
result2 = line.split()
ra_deg = float(result2[1])
dec_deg = float(result2[2])
return (ra_deg, dec_deg)
return (0,0) | [
"def",
"resolve",
"(",
"object",
")",
":",
"import",
"re",
"sesame_cmd",
"=",
"'curl -s http://cdsweb.u-strasbg.fr/viz-bin/nph-sesame/-oI?'",
"+",
"string",
".",
"replace",
"(",
"object",
",",
"' '",
",",
"''",
")",
"f",
"=",
"os",
".",
"popen",
"(",
"sesame_c... | Look up the name of a source using a resolver | [
"Look",
"up",
"the",
"name",
"of",
"a",
"source",
"using",
"a",
"resolver"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/cfhtCutout.py#L74-L90 |
OSSOS/MOP | src/ossos/core/ossos/planning/plotting/plot_fanciness.py | remove_border | def remove_border(axes=None, keep=('left', 'bottom'), remove=('right', 'top'), labelcol=ALMOST_BLACK):
"""
Minimize chart junk by stripping out unnecessary plot borders and axis ticks.
The top/right/left/bottom keywords toggle whether the corresponding plot border is drawn
"""
ax = axes or plt.gca()
for spine in remove:
ax.spines[spine].set_visible(False)
for spine in keep:
ax.spines[spine].set_linewidth(0.5)
# ax.spines[spine].set_color('white')
# remove all ticks, then add back the ones in keep
# Does this also need to specify the ticks' colour, given the axes/labels are changed?
ax.yaxis.set_ticks_position('none')
ax.xaxis.set_ticks_position('none')
# ax.xaxis.set_ticklabels("")
# ax.yaxis.set_ticklabels("")
for spine in keep:
if spine == 'top':
ax.xaxis.tick_top()
if spine == 'bottom':
ax.xaxis.tick_bottom()
# match the label colour to that of the axes
ax.xaxis.label.set_color(labelcol)
ax.xaxis.set_tick_params(color=labelcol, labelcolor=labelcol)
if spine == 'left':
ax.yaxis.tick_left()
ax.yaxis.label.set_color(labelcol)
ax.yaxis.set_tick_params(color=labelcol, labelcolor=labelcol)
if spine == 'right':
ax.yaxis.tick_right()
return | python | def remove_border(axes=None, keep=('left', 'bottom'), remove=('right', 'top'), labelcol=ALMOST_BLACK):
"""
Minimize chart junk by stripping out unnecessary plot borders and axis ticks.
The top/right/left/bottom keywords toggle whether the corresponding plot border is drawn
"""
ax = axes or plt.gca()
for spine in remove:
ax.spines[spine].set_visible(False)
for spine in keep:
ax.spines[spine].set_linewidth(0.5)
# ax.spines[spine].set_color('white')
# remove all ticks, then add back the ones in keep
# Does this also need to specify the ticks' colour, given the axes/labels are changed?
ax.yaxis.set_ticks_position('none')
ax.xaxis.set_ticks_position('none')
# ax.xaxis.set_ticklabels("")
# ax.yaxis.set_ticklabels("")
for spine in keep:
if spine == 'top':
ax.xaxis.tick_top()
if spine == 'bottom':
ax.xaxis.tick_bottom()
# match the label colour to that of the axes
ax.xaxis.label.set_color(labelcol)
ax.xaxis.set_tick_params(color=labelcol, labelcolor=labelcol)
if spine == 'left':
ax.yaxis.tick_left()
ax.yaxis.label.set_color(labelcol)
ax.yaxis.set_tick_params(color=labelcol, labelcolor=labelcol)
if spine == 'right':
ax.yaxis.tick_right()
return | [
"def",
"remove_border",
"(",
"axes",
"=",
"None",
",",
"keep",
"=",
"(",
"'left'",
",",
"'bottom'",
")",
",",
"remove",
"=",
"(",
"'right'",
",",
"'top'",
")",
",",
"labelcol",
"=",
"ALMOST_BLACK",
")",
":",
"ax",
"=",
"axes",
"or",
"plt",
".",
"gc... | Minimize chart junk by stripping out unnecessary plot borders and axis ticks.
The top/right/left/bottom keywords toggle whether the corresponding plot border is drawn | [
"Minimize",
"chart",
"junk",
"by",
"stripping",
"out",
"unnecessary",
"plot",
"borders",
"and",
"axis",
"ticks",
".",
"The",
"top",
"/",
"right",
"/",
"left",
"/",
"bottom",
"keywords",
"toggle",
"whether",
"the",
"corresponding",
"plot",
"border",
"is",
"dr... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/plotting/plot_fanciness.py#L32-L66 |
OSSOS/MOP | src/jjk/preproc/demo.py | handle_exit_code | def handle_exit_code(d, code):
"""Sample function showing how to interpret the dialog exit codes.
This function is not used after every call to dialog in this demo
for two reasons:
1. For some boxes, unfortunately, dialog returns the code for
ERROR when the user presses ESC (instead of the one chosen
for ESC). As these boxes only have an OK button, and an
exception is raised and correctly handled here in case of
real dialog errors, there is no point in testing the dialog
exit status (it can't be CANCEL as there is no CANCEL
button; it can't be ESC as unfortunately, the dialog makes
it appear as an error; it can't be ERROR as this is handled
in dialog.py to raise an exception; therefore, it *is* OK).
2. To not clutter simple code with things that are
demonstrated elsewhere.
"""
# d is supposed to be a Dialog instance
if code in (d.DIALOG_CANCEL, d.DIALOG_ESC):
if code == d.DIALOG_CANCEL:
msg = "You chose cancel in the last dialog box. Do you want to " \
"exit this demo?"
else:
msg = "You pressed ESC in the last dialog box. Do you want to " \
"exit this demo?"
# "No" or "ESC" will bring the user back to the demo.
# DIALOG_ERROR is propagated as an exception and caught in main().
# So we only need to handle OK here.
if d.yesno(msg) == d.DIALOG_OK:
sys.exit(0)
return 0
else:
return 1 | python | def handle_exit_code(d, code):
"""Sample function showing how to interpret the dialog exit codes.
This function is not used after every call to dialog in this demo
for two reasons:
1. For some boxes, unfortunately, dialog returns the code for
ERROR when the user presses ESC (instead of the one chosen
for ESC). As these boxes only have an OK button, and an
exception is raised and correctly handled here in case of
real dialog errors, there is no point in testing the dialog
exit status (it can't be CANCEL as there is no CANCEL
button; it can't be ESC as unfortunately, the dialog makes
it appear as an error; it can't be ERROR as this is handled
in dialog.py to raise an exception; therefore, it *is* OK).
2. To not clutter simple code with things that are
demonstrated elsewhere.
"""
# d is supposed to be a Dialog instance
if code in (d.DIALOG_CANCEL, d.DIALOG_ESC):
if code == d.DIALOG_CANCEL:
msg = "You chose cancel in the last dialog box. Do you want to " \
"exit this demo?"
else:
msg = "You pressed ESC in the last dialog box. Do you want to " \
"exit this demo?"
# "No" or "ESC" will bring the user back to the demo.
# DIALOG_ERROR is propagated as an exception and caught in main().
# So we only need to handle OK here.
if d.yesno(msg) == d.DIALOG_OK:
sys.exit(0)
return 0
else:
return 1 | [
"def",
"handle_exit_code",
"(",
"d",
",",
"code",
")",
":",
"# d is supposed to be a Dialog instance",
"if",
"code",
"in",
"(",
"d",
".",
"DIALOG_CANCEL",
",",
"d",
".",
"DIALOG_ESC",
")",
":",
"if",
"code",
"==",
"d",
".",
"DIALOG_CANCEL",
":",
"msg",
"="... | Sample function showing how to interpret the dialog exit codes.
This function is not used after every call to dialog in this demo
for two reasons:
1. For some boxes, unfortunately, dialog returns the code for
ERROR when the user presses ESC (instead of the one chosen
for ESC). As these boxes only have an OK button, and an
exception is raised and correctly handled here in case of
real dialog errors, there is no point in testing the dialog
exit status (it can't be CANCEL as there is no CANCEL
button; it can't be ESC as unfortunately, the dialog makes
it appear as an error; it can't be ERROR as this is handled
in dialog.py to raise an exception; therefore, it *is* OK).
2. To not clutter simple code with things that are
demonstrated elsewhere. | [
"Sample",
"function",
"showing",
"how",
"to",
"interpret",
"the",
"dialog",
"exit",
"codes",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/demo.py#L28-L63 |
OSSOS/MOP | src/jjk/preproc/demo.py | main | def main():
"""This demo shows the main features of the pythondialog Dialog class.
"""
try:
demo()
except dialog.error, exc_instance:
sys.stderr.write("Error:\n\n%s\n" % exc_instance.complete_message())
sys.exit(1)
sys.exit(0) | python | def main():
"""This demo shows the main features of the pythondialog Dialog class.
"""
try:
demo()
except dialog.error, exc_instance:
sys.stderr.write("Error:\n\n%s\n" % exc_instance.complete_message())
sys.exit(1)
sys.exit(0) | [
"def",
"main",
"(",
")",
":",
"try",
":",
"demo",
"(",
")",
"except",
"dialog",
".",
"error",
",",
"exc_instance",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"Error:\\n\\n%s\\n\"",
"%",
"exc_instance",
".",
"complete_message",
"(",
")",
")",
"sys",
... | This demo shows the main features of the pythondialog Dialog class. | [
"This",
"demo",
"shows",
"the",
"main",
"features",
"of",
"the",
"pythondialog",
"Dialog",
"class",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/demo.py#L307-L317 |
OSSOS/MOP | src/jjk/preproc/s2.py | searchTriples | def searchTriples(expnums,ccd):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
return(-1)
### Some program Constants
proc_file = open("proc-these-files","w")
proc_file.write("# Files to be planted and searched\n")
proc_file.write("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
try:
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
except:
raise TaskError, "mysql failed"
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
mysql.close()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout=None
filename=MOPfits.adGet(str(expnum)+"p",extno=int(ccd),cutout=cutout)
if not os.access(filename,os.R_OK):
raise TaskError, 'adGet Failed'
filename=os.path.splitext(filename)
filenames.append(filename[0])
proc_file.write("%s %f %s \n" % ( filename[0], fwhm/0.183, "no"))
proc_file.flush()
proc_file.close()
command="find.pl -p '' -d ./ "
try:
os.system(command)
except:
raise TaskErorr, "execute find"
file_extens=[
"cands.comb",
"measure3.cands.astrom",
"measure3.WARNING",
"measure3.astrom.scatter"]
if not os.access("find.OK",os.R_OK):
raise TaskError, "find failed"
astrom=filenames[0]+".measure3.cands.astrom"
if os.access(astrom,os.R_OK):
return(1)
else:
return(0) | python | def searchTriples(expnums,ccd):
"""Given a list of exposure numbers, find all the KBOs in that set of exposures"""
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
return(-1)
### Some program Constants
proc_file = open("proc-these-files","w")
proc_file.write("# Files to be planted and searched\n")
proc_file.write("# image fwhm plant\n")
import string
import os.path
filenames=[]
import pyfits
for expnum in expnums:
try:
mysql=MOPdbaccess.connect('bucket','cfhls','MYSQL')
bucket=mysql.cursor()
except:
raise TaskError, "mysql failed"
bucket.execute("SELECT obs_iq_refccd FROM exposure WHERE expnum=%s" , (expnum, ) )
row=bucket.fetchone()
mysql.close()
fwhm=row[0]
if not fwhm > 0:
fwhm=1.0
if int(ccd)<18:
cutout="[-*,-*]"
else:
cutout=None
filename=MOPfits.adGet(str(expnum)+"p",extno=int(ccd),cutout=cutout)
if not os.access(filename,os.R_OK):
raise TaskError, 'adGet Failed'
filename=os.path.splitext(filename)
filenames.append(filename[0])
proc_file.write("%s %f %s \n" % ( filename[0], fwhm/0.183, "no"))
proc_file.flush()
proc_file.close()
command="find.pl -p '' -d ./ "
try:
os.system(command)
except:
raise TaskErorr, "execute find"
file_extens=[
"cands.comb",
"measure3.cands.astrom",
"measure3.WARNING",
"measure3.astrom.scatter"]
if not os.access("find.OK",os.R_OK):
raise TaskError, "find failed"
astrom=filenames[0]+".measure3.cands.astrom"
if os.access(astrom,os.R_OK):
return(1)
else:
return(0) | [
"def",
"searchTriples",
"(",
"expnums",
",",
"ccd",
")",
":",
"import",
"MOPfits",
",",
"os",
"import",
"MOPdbaccess",
"if",
"len",
"(",
"expnums",
")",
"!=",
"3",
":",
"return",
"(",
"-",
"1",
")",
"### Some program Constants",
"proc_file",
"=",
"open",
... | Given a list of exposure numbers, find all the KBOs in that set of exposures | [
"Given",
"a",
"list",
"of",
"exposure",
"numbers",
"find",
"all",
"the",
"KBOs",
"in",
"that",
"set",
"of",
"exposures"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/s2.py#L19-L90 |
OSSOS/MOP | src/jjk/preproc/MOPconf.py | discands | def discands(record):
"""Display the candidates contained in a candidate record list"""
import pyfits
pyraf.iraf.images()
pyraf.iraf.images.tv()
display = pyraf.iraf.images.tv.display
width=128
cands = record['cands']
exps= record['fileId']
comments= record['comments']
### load some header info from the mophead file
headers={}
for exp in exps:
f = pyfits.open(exp+".fits")
headers[exp]={}
for key in ['MJDATE', 'NAXIS1', 'NAXIS2', 'EXPTIME', 'FILTER']:
headers[exp][key]=f[0].header[key]
headers[exp]['MJD-OBSC']=headers[exp]['MJDATE']+headers[exp]['EXPTIME']/2.0/3600.0/24.0
f.close()
import math,os
real=0
cand_total=len(cands)
if cand_total > 100 :
sys.stderr.write("Too many candidates (%d) skipping this chip...\n" % ( cand_total))
return -1
for cand in cands:
cand_total=cand_total-1
x2=[]
y2=[]
y1=[]
x1=[]
for i in range(len(exps)):
#print exps[i]
fileId=exps[i]
x2.append(int(min(math.floor(cand[i]['x'])+width,headers[fileId]['NAXIS1'])))
y2.append(int(min(math.floor(cand[i]['y'])+width,headers[fileId]['NAXIS2'])))
x1.append(int(max(math.floor(cand[i]['x'])-width,1)))
y1.append(int(max(math.floor(cand[i]['y'])-width,1)))
#print exps
#print x1,x2
#print y1,y2
x_1 = min(x1)
y_1 = min(y1)
x_2 = max(x2)
y_2 = max(y2)
ans='f'
fake=True
xshifts=[]
yshifts=[]
first_pass=True
show_coords=True
ans='j'
while ( first_pass or ans == 'f' or ans == 'c') :
first_pass=False
if ans=='c':
if show_coords:
show_coords=False
else:
show_coords=True
if ans=='f':
if fake:
fake=False
else:
fake=True
for i in range(len(exps)):
xshift=cand[i]['x']-cand[i]['x_0']
yshift=cand[i]['y']-cand[i]['y_0']
allmark=open('all.coo','w')
for j in range(len(exps)):
if j==i :
continue
allmark.write('%f %f\n' % ( cand[j]['x_0']+xshift, cand[j]['y_0']+yshift))
allmark.close()
tvmark=open('tv.coo','w')
tvmark.write('%f %f %d\n' % ( cand[i]['x'], cand[i]['y'], cand_total))
x1=max(x_1 + xshift,1)
y1=max(y_1 + yshift,1)
x2=min(x_2 + xshift,headers[exps[i]]['NAXIS1'])
y2=min(y_2 + yshift,headers[exps[i]]['NAXIS2'])
cutout = "[%d:%d,%d:%d]" % (x1,x2,y1,y2)
if not fake:
if exps[i][0:2]=='fk' :
fileId=exps[i][2:]
else:
fileId=exps[i]
else:
fileId=exps[i]
# sys.stdout.write("---> ",fileId+cutout,xshift,yshift
try:
junk=display(fileId+cutout,i+1,Stdout=1)
except:
sys.stderr.write("ERROR\n")
tvmark.close()
if show_coords:
pyraf.iraf.images.tv.tvmark(i+1,'all.coo',mark='circle',radii=10,color=205)
pyraf.iraf.images.tv.tvmark(i+1,'tv.coo',mark='circle',radii=8,color=204,label='yes',nxoffset=10)
os.unlink('tv.coo')
os.unlink('all.coo')
### ask if this is a real candidate, take action.
ans='j'
while ans not in ('y', 'n', 'q', 's', 'f', 'c'):
ans=raw_input("[%d] (y,n,s,q,c,f) : " % ( cand_total))
if ans=='y':
if os.access("cands.REAL",os.W_OK):
f=open("cands.REAL","a")
else:
f=open("cands.REAL","w")
for comment in comments:
f.write(comment)
cols=['x','y','x_0','y_0','flux','size','max_int','elon']
#for col in cols:
# f.write("%s\t" % col)
f.write("\n")
for ii in range(len(exps)):
for col in cols:
f.write("%8.2f\t" % cand[ii][col])
f.write("\n")
f.close()
real=real+1
if ans=='q':
return -2
if ans=='s':
return -1
return real | python | def discands(record):
"""Display the candidates contained in a candidate record list"""
import pyfits
pyraf.iraf.images()
pyraf.iraf.images.tv()
display = pyraf.iraf.images.tv.display
width=128
cands = record['cands']
exps= record['fileId']
comments= record['comments']
### load some header info from the mophead file
headers={}
for exp in exps:
f = pyfits.open(exp+".fits")
headers[exp]={}
for key in ['MJDATE', 'NAXIS1', 'NAXIS2', 'EXPTIME', 'FILTER']:
headers[exp][key]=f[0].header[key]
headers[exp]['MJD-OBSC']=headers[exp]['MJDATE']+headers[exp]['EXPTIME']/2.0/3600.0/24.0
f.close()
import math,os
real=0
cand_total=len(cands)
if cand_total > 100 :
sys.stderr.write("Too many candidates (%d) skipping this chip...\n" % ( cand_total))
return -1
for cand in cands:
cand_total=cand_total-1
x2=[]
y2=[]
y1=[]
x1=[]
for i in range(len(exps)):
#print exps[i]
fileId=exps[i]
x2.append(int(min(math.floor(cand[i]['x'])+width,headers[fileId]['NAXIS1'])))
y2.append(int(min(math.floor(cand[i]['y'])+width,headers[fileId]['NAXIS2'])))
x1.append(int(max(math.floor(cand[i]['x'])-width,1)))
y1.append(int(max(math.floor(cand[i]['y'])-width,1)))
#print exps
#print x1,x2
#print y1,y2
x_1 = min(x1)
y_1 = min(y1)
x_2 = max(x2)
y_2 = max(y2)
ans='f'
fake=True
xshifts=[]
yshifts=[]
first_pass=True
show_coords=True
ans='j'
while ( first_pass or ans == 'f' or ans == 'c') :
first_pass=False
if ans=='c':
if show_coords:
show_coords=False
else:
show_coords=True
if ans=='f':
if fake:
fake=False
else:
fake=True
for i in range(len(exps)):
xshift=cand[i]['x']-cand[i]['x_0']
yshift=cand[i]['y']-cand[i]['y_0']
allmark=open('all.coo','w')
for j in range(len(exps)):
if j==i :
continue
allmark.write('%f %f\n' % ( cand[j]['x_0']+xshift, cand[j]['y_0']+yshift))
allmark.close()
tvmark=open('tv.coo','w')
tvmark.write('%f %f %d\n' % ( cand[i]['x'], cand[i]['y'], cand_total))
x1=max(x_1 + xshift,1)
y1=max(y_1 + yshift,1)
x2=min(x_2 + xshift,headers[exps[i]]['NAXIS1'])
y2=min(y_2 + yshift,headers[exps[i]]['NAXIS2'])
cutout = "[%d:%d,%d:%d]" % (x1,x2,y1,y2)
if not fake:
if exps[i][0:2]=='fk' :
fileId=exps[i][2:]
else:
fileId=exps[i]
else:
fileId=exps[i]
# sys.stdout.write("---> ",fileId+cutout,xshift,yshift
try:
junk=display(fileId+cutout,i+1,Stdout=1)
except:
sys.stderr.write("ERROR\n")
tvmark.close()
if show_coords:
pyraf.iraf.images.tv.tvmark(i+1,'all.coo',mark='circle',radii=10,color=205)
pyraf.iraf.images.tv.tvmark(i+1,'tv.coo',mark='circle',radii=8,color=204,label='yes',nxoffset=10)
os.unlink('tv.coo')
os.unlink('all.coo')
### ask if this is a real candidate, take action.
ans='j'
while ans not in ('y', 'n', 'q', 's', 'f', 'c'):
ans=raw_input("[%d] (y,n,s,q,c,f) : " % ( cand_total))
if ans=='y':
if os.access("cands.REAL",os.W_OK):
f=open("cands.REAL","a")
else:
f=open("cands.REAL","w")
for comment in comments:
f.write(comment)
cols=['x','y','x_0','y_0','flux','size','max_int','elon']
#for col in cols:
# f.write("%s\t" % col)
f.write("\n")
for ii in range(len(exps)):
for col in cols:
f.write("%8.2f\t" % cand[ii][col])
f.write("\n")
f.close()
real=real+1
if ans=='q':
return -2
if ans=='s':
return -1
return real | [
"def",
"discands",
"(",
"record",
")",
":",
"import",
"pyfits",
"pyraf",
".",
"iraf",
".",
"images",
"(",
")",
"pyraf",
".",
"iraf",
".",
"images",
".",
"tv",
"(",
")",
"display",
"=",
"pyraf",
".",
"iraf",
".",
"images",
".",
"tv",
".",
"display",... | Display the candidates contained in a candidate record list | [
"Display",
"the",
"candidates",
"contained",
"in",
"a",
"candidate",
"record",
"list"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/MOPconf.py#L49-L176 |
OSSOS/MOP | src/ossos/core/ossos/planning/optimize_pointings.py | is_up | def is_up(coordinate, current_time):
"""
Given the position and time determin if the given target is up.
@param coordinate: the J2000 location of the source
@param current_time: The time of the observations
@return: True/False
"""
cfht.date = current_time.iso.replace('-', '/')
cfht.horizon = math.radians(-7)
sun.compute(cfht)
sun_rise = Time(str(sun.rise_time).replace('/', '-'))
sun_set = Time(str(sun.set_time).replace('/', '-'))
if current_time < sun_set or current_time > sun_rise:
return False
fb._ra = coordinate.ra.radian
fb._dec = coordinate.dec.radian
cfht.horizon = math.radians(40)
fb.compute(cfht)
fb_rise_time = Time(str(fb.rise_time).replace('/', '-'))
fb_set_time = Time(str(fb.set_time).replace('/', '-'))
if (current_time > fb_set_time > fb_set_time or
fb_rise_time > current_time > fb_set_time):
return False
return True | python | def is_up(coordinate, current_time):
"""
Given the position and time determin if the given target is up.
@param coordinate: the J2000 location of the source
@param current_time: The time of the observations
@return: True/False
"""
cfht.date = current_time.iso.replace('-', '/')
cfht.horizon = math.radians(-7)
sun.compute(cfht)
sun_rise = Time(str(sun.rise_time).replace('/', '-'))
sun_set = Time(str(sun.set_time).replace('/', '-'))
if current_time < sun_set or current_time > sun_rise:
return False
fb._ra = coordinate.ra.radian
fb._dec = coordinate.dec.radian
cfht.horizon = math.radians(40)
fb.compute(cfht)
fb_rise_time = Time(str(fb.rise_time).replace('/', '-'))
fb_set_time = Time(str(fb.set_time).replace('/', '-'))
if (current_time > fb_set_time > fb_set_time or
fb_rise_time > current_time > fb_set_time):
return False
return True | [
"def",
"is_up",
"(",
"coordinate",
",",
"current_time",
")",
":",
"cfht",
".",
"date",
"=",
"current_time",
".",
"iso",
".",
"replace",
"(",
"'-'",
",",
"'/'",
")",
"cfht",
".",
"horizon",
"=",
"math",
".",
"radians",
"(",
"-",
"7",
")",
"sun",
"."... | Given the position and time determin if the given target is up.
@param coordinate: the J2000 location of the source
@param current_time: The time of the observations
@return: True/False | [
"Given",
"the",
"position",
"and",
"time",
"determin",
"if",
"the",
"given",
"target",
"is",
"up",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/ossos/planning/optimize_pointings.py#L27-L54 |
OSSOS/MOP | src/jjk/www_scripts/TapQuery.py | TAPQuery | def TAPQuery(query):
"""The __main__ part of the script"""
tapURL = "http://cadc-ccda.hia-iha.nrc-cnrc.gc.ca/tap/sync"
## Some default parameters for that TAP service queries.
tapParams={'REQUEST': 'doQuery',
'LANG': 'ADQL',
'FORMAT': 'votable',
'QUERY': query}
cnt=0
while True:
try:
print "running query"
r=urllib2.urlopen(tapURL,urllib.urlencode(tapParams))
return r
except urllib2.HTTPError, e:
cnt+=1
if e.code!=503:
sys.stderr.write("# TAP Query got Code: %s Attempt: %d (exiting)\n" % (str(e.code),cnt))
sys.exit(-1)
sys.stderr.write("# TAP Query got Code: %s Attempt: %d (sleeping for 10)\n" % (str(e.code),cnt))
time.sleep(10) | python | def TAPQuery(query):
"""The __main__ part of the script"""
tapURL = "http://cadc-ccda.hia-iha.nrc-cnrc.gc.ca/tap/sync"
## Some default parameters for that TAP service queries.
tapParams={'REQUEST': 'doQuery',
'LANG': 'ADQL',
'FORMAT': 'votable',
'QUERY': query}
cnt=0
while True:
try:
print "running query"
r=urllib2.urlopen(tapURL,urllib.urlencode(tapParams))
return r
except urllib2.HTTPError, e:
cnt+=1
if e.code!=503:
sys.stderr.write("# TAP Query got Code: %s Attempt: %d (exiting)\n" % (str(e.code),cnt))
sys.exit(-1)
sys.stderr.write("# TAP Query got Code: %s Attempt: %d (sleeping for 10)\n" % (str(e.code),cnt))
time.sleep(10) | [
"def",
"TAPQuery",
"(",
"query",
")",
":",
"tapURL",
"=",
"\"http://cadc-ccda.hia-iha.nrc-cnrc.gc.ca/tap/sync\"",
"## Some default parameters for that TAP service queries.",
"tapParams",
"=",
"{",
"'REQUEST'",
":",
"'doQuery'",
",",
"'LANG'",
":",
"'ADQL'",
",",
"'FORMAT'",... | The __main__ part of the script | [
"The",
"__main__",
"part",
"of",
"the",
"script"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/www_scripts/TapQuery.py#L28-L51 |
OSSOS/MOP | src/ossos/core/scripts/step1.py | step1 | def step1(expnum,
ccd,
prefix='',
version='p',
sex_thresh=_SEX_THRESHOLD,
wave_thresh=_WAVE_THRESHOLD,
maxcount=_MAX_COUNT,
dry_run=False):
"""run the actual step1jmp/matt codes.
expnum: the CFHT expousre to process
ccd: which ccd in the mosaic to process
fwhm: the image quality, FWHM, of the image. In pixels.
sex_thresh: the detection threhold to run sExtractor at
wave_thresh: the detection threshold for wavelet
maxcount: saturation level
"""
storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
basename = os.path.splitext(filename)[0]
logging.info(util.exec_prog(['step1jmp',
'-f', basename,
'-t', str(wave_thresh),
'-w', str(fwhm),
'-m', str(maxcount)]))
obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
prefix=prefix)
obj_filename = basename + ".obj.jmp"
if not dry_run:
storage.copy(obj_filename, obj_uri)
## for step1matt we need the weight image
hdulist = fits.open(filename)
flat_name = hdulist[0].header.get('FLAT', 'weight.fits')
parts = os.path.splitext(flat_name)
if parts[1] == '.fz':
flat_name = os.path.splitext(parts[0])[0]
else:
flat_name = parts[0]
try:
flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='calibrators')
except:
flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='old_calibrators')
if os.access('weight.fits', os.R_OK):
os.unlink('weight.fits')
if not os.access('weight.fits', os.R_OK):
os.symlink(flat_filename, 'weight.fits')
logging.info(util.exec_prog(['step1matt',
'-f', basename,
'-t', str(sex_thresh),
'-w', str(fwhm),
'-m', str(maxcount)]))
if os.access('weight.fits', os.R_OK):
os.unlink('weight.fits')
obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
prefix=prefix)
obj_filename = basename + ".obj.matt"
if not dry_run:
storage.copy(obj_filename, obj_uri)
return True | python | def step1(expnum,
ccd,
prefix='',
version='p',
sex_thresh=_SEX_THRESHOLD,
wave_thresh=_WAVE_THRESHOLD,
maxcount=_MAX_COUNT,
dry_run=False):
"""run the actual step1jmp/matt codes.
expnum: the CFHT expousre to process
ccd: which ccd in the mosaic to process
fwhm: the image quality, FWHM, of the image. In pixels.
sex_thresh: the detection threhold to run sExtractor at
wave_thresh: the detection threshold for wavelet
maxcount: saturation level
"""
storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
basename = os.path.splitext(filename)[0]
logging.info(util.exec_prog(['step1jmp',
'-f', basename,
'-t', str(wave_thresh),
'-w', str(fwhm),
'-m', str(maxcount)]))
obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
prefix=prefix)
obj_filename = basename + ".obj.jmp"
if not dry_run:
storage.copy(obj_filename, obj_uri)
## for step1matt we need the weight image
hdulist = fits.open(filename)
flat_name = hdulist[0].header.get('FLAT', 'weight.fits')
parts = os.path.splitext(flat_name)
if parts[1] == '.fz':
flat_name = os.path.splitext(parts[0])[0]
else:
flat_name = parts[0]
try:
flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='calibrators')
except:
flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='old_calibrators')
if os.access('weight.fits', os.R_OK):
os.unlink('weight.fits')
if not os.access('weight.fits', os.R_OK):
os.symlink(flat_filename, 'weight.fits')
logging.info(util.exec_prog(['step1matt',
'-f', basename,
'-t', str(sex_thresh),
'-w', str(fwhm),
'-m', str(maxcount)]))
if os.access('weight.fits', os.R_OK):
os.unlink('weight.fits')
obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
prefix=prefix)
obj_filename = basename + ".obj.matt"
if not dry_run:
storage.copy(obj_filename, obj_uri)
return True | [
"def",
"step1",
"(",
"expnum",
",",
"ccd",
",",
"prefix",
"=",
"''",
",",
"version",
"=",
"'p'",
",",
"sex_thresh",
"=",
"_SEX_THRESHOLD",
",",
"wave_thresh",
"=",
"_WAVE_THRESHOLD",
",",
"maxcount",
"=",
"_MAX_COUNT",
",",
"dry_run",
"=",
"False",
")",
... | run the actual step1jmp/matt codes.
expnum: the CFHT expousre to process
ccd: which ccd in the mosaic to process
fwhm: the image quality, FWHM, of the image. In pixels.
sex_thresh: the detection threhold to run sExtractor at
wave_thresh: the detection threshold for wavelet
maxcount: saturation level | [
"run",
"the",
"actual",
"step1jmp",
"/",
"matt",
"codes",
"."
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/core/scripts/step1.py#L45-L117 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | VIOMTable.get_json | def get_json(self):
"""Create JSON data for AdapterConfig.
:returns: JSON data as follows:
{
"VIOMManage":{
},
"InitBoot":{
},
"UseVirtualAddresses":{
},
"BootMenuEnable":{
},
"SmuxSetting":{
},
"Slots":{
}
}
"""
viom_table = self.get_basic_json()
if self.slots:
viom_table['Slots'] = {
'Slot': [s.get_json() for s in self.slots.values()]
}
if self.manage:
viom_table['VIOMManage'] = self.manage.get_json()
return viom_table | python | def get_json(self):
"""Create JSON data for AdapterConfig.
:returns: JSON data as follows:
{
"VIOMManage":{
},
"InitBoot":{
},
"UseVirtualAddresses":{
},
"BootMenuEnable":{
},
"SmuxSetting":{
},
"Slots":{
}
}
"""
viom_table = self.get_basic_json()
if self.slots:
viom_table['Slots'] = {
'Slot': [s.get_json() for s in self.slots.values()]
}
if self.manage:
viom_table['VIOMManage'] = self.manage.get_json()
return viom_table | [
"def",
"get_json",
"(",
"self",
")",
":",
"viom_table",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"if",
"self",
".",
"slots",
":",
"viom_table",
"[",
"'Slots'",
"]",
"=",
"{",
"'Slot'",
":",
"[",
"s",
".",
"get_json",
"(",
")",
"for",
"s",
"in"... | Create JSON data for AdapterConfig.
:returns: JSON data as follows:
{
"VIOMManage":{
},
"InitBoot":{
},
"UseVirtualAddresses":{
},
"BootMenuEnable":{
},
"SmuxSetting":{
},
"Slots":{
}
} | [
"Create",
"JSON",
"data",
"for",
"AdapterConfig",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L171-L198 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | Slot.get_json | def get_json(self):
"""Create JSON data for slot.
:returns: JSON data for slot as follows:
{
"@SlotIdx":0,
"OnboardControllers":{
"OnboardController": [
]
},
"AddOnCards":{
"AddOnCard": [
]
}
}
"""
json = self.get_basic_json()
if self.onboard_cards:
json['OnboardControllers'] = {
'OnboardController':
[c.get_json() for c in self.onboard_cards.values()]
}
if self.addon_cards:
json['AddOnCards'] = {
'AddOnCard': [c.get_json() for c in self.addon_cards.values()]
}
return json | python | def get_json(self):
"""Create JSON data for slot.
:returns: JSON data for slot as follows:
{
"@SlotIdx":0,
"OnboardControllers":{
"OnboardController": [
]
},
"AddOnCards":{
"AddOnCard": [
]
}
}
"""
json = self.get_basic_json()
if self.onboard_cards:
json['OnboardControllers'] = {
'OnboardController':
[c.get_json() for c in self.onboard_cards.values()]
}
if self.addon_cards:
json['AddOnCards'] = {
'AddOnCard': [c.get_json() for c in self.addon_cards.values()]
}
return json | [
"def",
"get_json",
"(",
"self",
")",
":",
"json",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"if",
"self",
".",
"onboard_cards",
":",
"json",
"[",
"'OnboardControllers'",
"]",
"=",
"{",
"'OnboardController'",
":",
"[",
"c",
".",
"get_json",
"(",
")",
... | Create JSON data for slot.
:returns: JSON data for slot as follows:
{
"@SlotIdx":0,
"OnboardControllers":{
"OnboardController": [
]
},
"AddOnCards":{
"AddOnCard": [
]
}
} | [
"Create",
"JSON",
"data",
"for",
"slot",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L260-L287 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | LANPort.get_json | def get_json(self):
"""Create JSON data for LANPort.
:returns: JSON data as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"BootProtocol":{
},
"VirtualAddress":{
"MAC":{
}
},
"BootPriority":{
},
"ISCSIBootEnvironment":{
}
}
"""
port = self.get_basic_json()
port.update({
'BootProtocol': self.boot.BOOT_PROTOCOL,
'BootPriority': self.boot.boot_prio,
})
boot_env = self.boot.get_json()
if boot_env:
port.update(boot_env)
if self.use_virtual_addresses and self.mac:
port['VirtualAddress'] = {'MAC': self.mac}
return port | python | def get_json(self):
"""Create JSON data for LANPort.
:returns: JSON data as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"BootProtocol":{
},
"VirtualAddress":{
"MAC":{
}
},
"BootPriority":{
},
"ISCSIBootEnvironment":{
}
}
"""
port = self.get_basic_json()
port.update({
'BootProtocol': self.boot.BOOT_PROTOCOL,
'BootPriority': self.boot.boot_prio,
})
boot_env = self.boot.get_json()
if boot_env:
port.update(boot_env)
if self.use_virtual_addresses and self.mac:
port['VirtualAddress'] = {'MAC': self.mac}
return port | [
"def",
"get_json",
"(",
"self",
")",
":",
"port",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"port",
".",
"update",
"(",
"{",
"'BootProtocol'",
":",
"self",
".",
"boot",
".",
"BOOT_PROTOCOL",
",",
"'BootPriority'",
":",
"self",
".",
"boot",
".",
"bo... | Create JSON data for LANPort.
:returns: JSON data as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"BootProtocol":{
},
"VirtualAddress":{
"MAC":{
}
},
"BootPriority":{
},
"ISCSIBootEnvironment":{
}
} | [
"Create",
"JSON",
"data",
"for",
"LANPort",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L422-L456 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | FCPort.get_json | def get_json(self):
"""Create FC port.
:returns: JSON for FC port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"VirtualAddress":{
"WWNN":{
},
"WWPN":{
},
"MAC":{
}
},
"BootProtocol":{
},
"BootPriority":{
},
"FCBootEnvironment":{
}
}
"""
port = self.get_basic_json()
port.update({
'BootProtocol': self.boot.BOOT_PROTOCOL,
'BootPriority': self.boot.boot_prio,
})
boot_env = self.boot.get_json()
if boot_env:
port.update(boot_env)
if self.use_virtual_addresses:
addresses = {}
if self.wwnn:
addresses['WWNN'] = self.wwnn
if self.wwpn:
addresses['WWPN'] = self.wwpn
if addresses:
port['VirtualAddress'] = addresses
return port | python | def get_json(self):
"""Create FC port.
:returns: JSON for FC port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"VirtualAddress":{
"WWNN":{
},
"WWPN":{
},
"MAC":{
}
},
"BootProtocol":{
},
"BootPriority":{
},
"FCBootEnvironment":{
}
}
"""
port = self.get_basic_json()
port.update({
'BootProtocol': self.boot.BOOT_PROTOCOL,
'BootPriority': self.boot.boot_prio,
})
boot_env = self.boot.get_json()
if boot_env:
port.update(boot_env)
if self.use_virtual_addresses:
addresses = {}
if self.wwnn:
addresses['WWNN'] = self.wwnn
if self.wwpn:
addresses['WWPN'] = self.wwpn
if addresses:
port['VirtualAddress'] = addresses
return port | [
"def",
"get_json",
"(",
"self",
")",
":",
"port",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"port",
".",
"update",
"(",
"{",
"'BootProtocol'",
":",
"self",
".",
"boot",
".",
"BOOT_PROTOCOL",
",",
"'BootPriority'",
":",
"self",
".",
"boot",
".",
"bo... | Create FC port.
:returns: JSON for FC port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"UseVirtualAddresses":{
},
"VirtualAddress":{
"WWNN":{
},
"WWPN":{
},
"MAC":{
}
},
"BootProtocol":{
},
"BootPriority":{
},
"FCBootEnvironment":{
}
} | [
"Create",
"FC",
"port",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L476-L519 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | CNAPort.get_json | def get_json(self):
"""Create JSON for CNA port.
:returns: JSON for CNA port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"Functions":{
}
}
"""
port = self.get_basic_json()
port['Functions'] = {
'Function': [f.get_json() for f in self.functions.values()]
}
return port | python | def get_json(self):
"""Create JSON for CNA port.
:returns: JSON for CNA port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"Functions":{
}
}
"""
port = self.get_basic_json()
port['Functions'] = {
'Function': [f.get_json() for f in self.functions.values()]
}
return port | [
"def",
"get_json",
"(",
"self",
")",
":",
"port",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"port",
"[",
"'Functions'",
"]",
"=",
"{",
"'Function'",
":",
"[",
"f",
".",
"get_json",
"(",
")",
"for",
"f",
"in",
"self",
".",
"functions",
".",
"val... | Create JSON for CNA port.
:returns: JSON for CNA port as follows:
{
"@PortIdx":1,
"PortEnable":{
},
"Functions":{
}
} | [
"Create",
"JSON",
"for",
"CNA",
"port",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L539-L555 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | FCBoot.get_json | def get_json(self):
"""Create JSON for FCBootEnvironment.
:returns: JSON for FCBootEnvironment as follows:
{
"FCBootEnvironment":{
"FCTargets":{
"FCTarget":[
]
},
"FCLinkSpeed":{
},
"SANBootEnable":{
},
"FCTopology":{
}
}
}
"""
json = self.get_basic_json()
for i in range(len(self.targets)):
# @FCTargetIdx starts from 1.
self.targets[i].set_index(i + 1)
json['FCTargets'] = {
'FCTarget': [t.get_json() for t in self.targets]
}
return {'FCBootEnvironment': json} | python | def get_json(self):
"""Create JSON for FCBootEnvironment.
:returns: JSON for FCBootEnvironment as follows:
{
"FCBootEnvironment":{
"FCTargets":{
"FCTarget":[
]
},
"FCLinkSpeed":{
},
"SANBootEnable":{
},
"FCTopology":{
}
}
}
"""
json = self.get_basic_json()
for i in range(len(self.targets)):
# @FCTargetIdx starts from 1.
self.targets[i].set_index(i + 1)
json['FCTargets'] = {
'FCTarget': [t.get_json() for t in self.targets]
}
return {'FCBootEnvironment': json} | [
"def",
"get_json",
"(",
"self",
")",
":",
"json",
"=",
"self",
".",
"get_basic_json",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"targets",
")",
")",
":",
"# @FCTargetIdx starts from 1.",
"self",
".",
"targets",
"[",
"i",
"]",
... | Create JSON for FCBootEnvironment.
:returns: JSON for FCBootEnvironment as follows:
{
"FCBootEnvironment":{
"FCTargets":{
"FCTarget":[
]
},
"FCLinkSpeed":{
},
"SANBootEnable":{
},
"FCTopology":{
}
}
} | [
"Create",
"JSON",
"for",
"FCBootEnvironment",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L772-L799 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | ISCSIInitiator.get_json | def get_json(self):
"""Create JSON data for iSCSI initiator.
:returns: JSON data for iSCSI initiator as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"SubnetMask":{
},
"GatewayIPv4Address":{
},
"VLANId":{
}
}
"""
if self.dhcp_usage:
return {'DHCPUsage': self.dhcp_usage,
'Name': self.iqn}
else:
return self.get_basic_json() | python | def get_json(self):
"""Create JSON data for iSCSI initiator.
:returns: JSON data for iSCSI initiator as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"SubnetMask":{
},
"GatewayIPv4Address":{
},
"VLANId":{
}
}
"""
if self.dhcp_usage:
return {'DHCPUsage': self.dhcp_usage,
'Name': self.iqn}
else:
return self.get_basic_json() | [
"def",
"get_json",
"(",
"self",
")",
":",
"if",
"self",
".",
"dhcp_usage",
":",
"return",
"{",
"'DHCPUsage'",
":",
"self",
".",
"dhcp_usage",
",",
"'Name'",
":",
"self",
".",
"iqn",
"}",
"else",
":",
"return",
"self",
".",
"get_basic_json",
"(",
")"
] | Create JSON data for iSCSI initiator.
:returns: JSON data for iSCSI initiator as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"SubnetMask":{
},
"GatewayIPv4Address":{
},
"VLANId":{
}
} | [
"Create",
"JSON",
"data",
"for",
"iSCSI",
"initiator",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L875-L899 |
openstack/python-scciclient | scciclient/irmc/viom/elcm.py | ISCSITarget.get_json | def get_json(self):
"""Create JSON data for iSCSI target.
:returns: JSON data for iSCSI target as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"PortNumber":{
},
"BootLUN":{
},
"AuthenticationMethod":{
},
"ChapUserName":{
},
"ChapSecret":{
},
"MutualChapSecret":{
}
}
"""
json = {
'DHCPUsage': self.dhcp_usage,
'AuthenticationMethod': self.auth_method,
}
if not self.dhcp_usage:
json['Name'] = self.iqn
json['IPv4Address'] = self.ip
json['PortNumber'] = self.port
json['BootLUN'] = self.lun
if self.chap_user:
json['ChapUserName'] = self.chap_user
if self.chap_secret:
json['ChapSecret'] = self.chap_secret
if self.mutual_chap_secret:
json['MutualChapSecret'] = self.mutual_chap_secret
return json | python | def get_json(self):
"""Create JSON data for iSCSI target.
:returns: JSON data for iSCSI target as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"PortNumber":{
},
"BootLUN":{
},
"AuthenticationMethod":{
},
"ChapUserName":{
},
"ChapSecret":{
},
"MutualChapSecret":{
}
}
"""
json = {
'DHCPUsage': self.dhcp_usage,
'AuthenticationMethod': self.auth_method,
}
if not self.dhcp_usage:
json['Name'] = self.iqn
json['IPv4Address'] = self.ip
json['PortNumber'] = self.port
json['BootLUN'] = self.lun
if self.chap_user:
json['ChapUserName'] = self.chap_user
if self.chap_secret:
json['ChapSecret'] = self.chap_secret
if self.mutual_chap_secret:
json['MutualChapSecret'] = self.mutual_chap_secret
return json | [
"def",
"get_json",
"(",
"self",
")",
":",
"json",
"=",
"{",
"'DHCPUsage'",
":",
"self",
".",
"dhcp_usage",
",",
"'AuthenticationMethod'",
":",
"self",
".",
"auth_method",
",",
"}",
"if",
"not",
"self",
".",
"dhcp_usage",
":",
"json",
"[",
"'Name'",
"]",
... | Create JSON data for iSCSI target.
:returns: JSON data for iSCSI target as follows:
{
"DHCPUsage":{
},
"Name":{
},
"IPv4Address":{
},
"PortNumber":{
},
"BootLUN":{
},
"AuthenticationMethod":{
},
"ChapUserName":{
},
"ChapSecret":{
},
"MutualChapSecret":{
}
} | [
"Create",
"JSON",
"data",
"for",
"iSCSI",
"target",
"."
] | train | https://github.com/openstack/python-scciclient/blob/4585ce2f76853b9773fb190ca0cfff0aa04a7cf8/scciclient/irmc/viom/elcm.py#L919-L960 |
OSSOS/MOP | src/jjk/preproc/plot.py | zscale | def zscale(data,contrast,min=100,max=60000):
"""Scale the data cube into the range 0-255"""
## pic 100 random elements along each dimension
## use zscale (see the IRAF display man page or
## http://iraf.net/article.php/20051205162333315
import random
x=[]
for i in random.sample(xrange(data.shape[0]),50):
for j in random.sample(xrange(data.shape[1]),50):
x.append(data[i,j])
yl=numarray.sort(numarray.clip(x,min,max))
n=len(yl)
ym=sum(yl)/float(n)
xl=numarray.array(range(n))
xm=sum(xl)/float(n)
ss_xx=sum((xl-xm)*(xl-xm))
ss_yy=sum((yl-ym)*(yl-ym))
ss_xy=sum((xl-xm)*(yl-ym))
b=ss_xy/ss_xx
a=ym-b*xm
z1=yl[n/2] + (b/contrast)*(1-n/2)
z2=yl[n/2] + (b/contrast)*(n-n/2)
## Now put the data inbetween Z1 and Z2
high=data-z1
z2=z2-z1
high=numarray.clip(high,0,z2)
## and change that to 0-255
high= 256-256*high/z2
### send back the scalled data
def zscale(data,contrast,min=100,max=60000):
    """Scale *data* for display using an IRAF-style zscale stretch.

    A linear fit of sorted, clipped sample pixel values against their
    rank gives the display cuts [z1, z2]; the data are then mapped into
    roughly 0-256 with the sense INVERTED (values at z1 map near 256,
    values at z2 map to 0).  See the IRAF display man page or
    http://iraf.net/article.php/20051205162333315

    NOTE(review): parameters ``min``/``max`` shadow the builtins; kept
    for interface compatibility.
    """
    import random
    # Sample 50 random rows x 50 random columns (2500 pixels) rather
    # than the full frame; that is plenty to estimate the stretch.
    x=[]
    for i in random.sample(xrange(data.shape[0]),50):
        for j in random.sample(xrange(data.shape[1]),50):
            x.append(data[i,j])
    yl=numarray.sort(numarray.clip(x,min,max))
    n=len(yl)
    ym=sum(yl)/float(n)
    xl=numarray.array(range(n))
    xm=sum(xl)/float(n)
    # Least-squares slope of sorted sample value vs. rank.
    ss_xx=sum((xl-xm)*(xl-xm))
    ss_xy=sum((xl-xm)*(yl-ym))
    b=ss_xy/ss_xx
    # Cuts centred on the sample median; slope damped by the contrast.
    z1=yl[n/2] + (b/contrast)*(1-n/2)
    z2=yl[n/2] + (b/contrast)*(n-n/2)
    # Clip the data between z1 and z2 ...
    high=data-z1
    z2=z2-z1
    high=numarray.clip(high,0,z2)
    # ... and invert it into the 0-256 display range.
    high= 256-256*high/z2
    # send back the scaled data
    return high
"def",
"zscale",
"(",
"data",
",",
"contrast",
",",
"min",
"=",
"100",
",",
"max",
"=",
"60000",
")",
":",
"## pic 100 random elements along each dimension",
"## use zscale (see the IRAF display man page or",
"## http://iraf.net/article.php/20051205162333315",
"import",
"rand... | Scale the data cube into the range 0-255 | [
"Scale",
"the",
"data",
"cube",
"into",
"the",
"range",
"0",
"-",
"255"
] | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/jjk/preproc/plot.py#L12-L52 |
OSSOS/MOP | src/ossos/web/web/initiate/populate_ossuary.py | update_values | def update_values(ims, image_id, iq_zeropt=True, comment=False, snr=False, commdict=None):
"""
Update a row in ossuary with
:param ims: an ImageQuery, contains image table and a connector
:param image_id: the primary key of the row to be updated
:param iq_zeropt: Keyword set if iq and zeropoint are to be checked for updating
:param comment: Keyword set if image is to have a comment of Stephen's added
:param commdict: The dictionary parsed from Stephen's file of comments
:return: No return, just updates ossuary.
"""
updating_params = {}
if iq_zeropt:
updating_params = get_iq_and_zeropoint(image_id, {})
if comment:
updating_params = {'comment': commdict[str(image_id)]}
if snr:
updating_params = get_snr(image_id, {})
ss = ims.images.update(ims.images.c.image_id == image_id)
ims.conn.execute(ss, updating_params)
def update_values(ims, image_id, iq_zeropt=True, comment=False, snr=False, commdict=None):
    """Update a single row of the ossuary images table.

    :param ims: an ImageQuery; supplies the images table and DB connection
    :param image_id: primary key of the row to be updated
    :param iq_zeropt: when True, refresh the iq and zeropoint columns
    :param comment: when True, attach Stephen's comment for this image
    :param snr: when True, refresh the signal-to-noise column
    :param commdict: mapping of image id (as str) to comment text,
        parsed from Stephen's file of comments
    :return: None; the row is updated in place in ossuary
    """
    # Later flags win: each branch rebuilds the parameter dict rather
    # than merging into it.
    new_values = {}
    if iq_zeropt:
        new_values = get_iq_and_zeropoint(image_id, {})
    if comment:
        new_values = {'comment': commdict[str(image_id)]}
    if snr:
        new_values = get_snr(image_id, {})
    update_stmt = ims.images.update(ims.images.c.image_id == image_id)
    ims.conn.execute(update_stmt, new_values)
    return
"def",
"update_values",
"(",
"ims",
",",
"image_id",
",",
"iq_zeropt",
"=",
"True",
",",
"comment",
"=",
"False",
",",
"snr",
"=",
"False",
",",
"commdict",
"=",
"None",
")",
":",
"updating_params",
"=",
"{",
"}",
"if",
"iq_zeropt",
":",
"updating_params... | Update a row in ossuary with
:param ims: an ImageQuery, contains image table and a connector
:param image_id: the primary key of the row to be updated
:param iq_zeropt: Keyword set if iq and zeropoint are to be checked for updating
:param comment: Keyword set if image is to have a comment of Stephen's added
:param commdict: The dictionary parsed from Stephen's file of comments
:return: No return, just updates ossuary. | [
"Update",
"a",
"row",
"in",
"ossuary",
"with",
":",
"param",
"ims",
":",
"an",
"ImageQuery",
"contains",
"image",
"table",
"and",
"a",
"connector",
":",
"param",
"image_id",
":",
"the",
"primary",
"key",
"of",
"the",
"row",
"to",
"be",
"updated",
":",
... | train | https://github.com/OSSOS/MOP/blob/94f91d32ad5ec081d5a1ebd67604a838003465af/src/ossos/web/web/initiate/populate_ossuary.py#L234-L254 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.