code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1
value | license stringclasses 15
values | size int64 3 1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta 4
# Copyright 2015 [email protected]
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of pelisalacarta 4.
#
# pelisalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pelisalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pelisalacarta 4. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------------
# Server management
#------------------------------------------------------------
import os
from core import config
from core import logger
from core import scrapertools
# Generic function to find videos in a page
def find_video_items(item=None, data=None, channel=""):
    """Generic function to find video links in a page.

    Downloads item.url when *data* is not given, runs every known server
    connector over the page and returns a list of playable Items.
    """
    logger.info("pelisalacarta.core.servertools find_video_items")
    # Download the page unless the caller already supplies its HTML
    if data is None:
        data = scrapertools.cache_page(item.url)
    # Look for links of every known server
    from core.item import Item
    listavideos = findvideos(data)
    if item is None:
        item = Item()
    itemlist = []
    for video in listavideos:
        scrapedtitle = "Enlace encontrado en "+video[2]
        scrapedurl = video[1]
        server = video[2]
        # Fetch the server parameters once (previously queried twice per video);
        # .get() also avoids a KeyError when the xml failed to load and {} came back
        server_parameters = get_server_parameters(server)
        if server_parameters.get("thumbnail"):
            thumbnail = server_parameters["thumbnail"]
        else:
            thumbnail = "http://media.tvalacarta.info/servers/server_"+server+".png"
        itemlist.append( Item(channel=item.channel, title=scrapedtitle , action="play" , server=server, url=scrapedurl, thumbnail=thumbnail, show=item.show , plot=item.plot , parentContent=item, folder=False) )
    return itemlist
def guess_server_thumbnail(title):
    """Guess a server thumbnail URL from a link title."""
    logger.info("pelisalacarta.core.servertools guess_server_thumbnail title="+title)
    lowcase_title = title.lower()
    # Special cases whose server id does not literally appear in the title:
    # (token to look for, name used in the log line, real server id)
    special_cases = (
        ("netu", "netutv", "netutv"),
        ("ul.to", "ul.to", "uploadedto"),
        ("waaw", "waaw", "waaw"),
        ("streamin", "streamin", "streaminto"),
    )
    for token, log_name, server_id in special_cases:
        if token in lowcase_title:
            logger.info("pelisalacarta.core.servertools guess_server_thumbnail caso especial "+log_name)
            return "http://media.tvalacarta.info/servers/server_"+server_id+".png"
    # Fall back to any enabled server whose id appears in the title
    servers = get_servers_list()
    for serverid in servers:
        if serverid in lowcase_title:
            logger.info("pelisalacarta.core.servertools guess_server_thumbnail encontrado "+serverid)
            return "http://media.tvalacarta.info/servers/server_"+serverid+".png"
    return ""
def findvideosbyserver(data, serverid):
    """Run a single server connector over *data*.

    Returns the connector's list of (title, url, server) matches, or an
    empty list when the connector is missing or raises.
    """
    logger.info("pelisalacarta.core.servertools findvideosbyserver")
    devuelve = []
    try:
        # Import the connector the same "Plex compatible" way findvideos()
        # does, instead of building exec statements from strings.
        servers_module = __import__("servers."+serverid)
        server_module = getattr(servers_module, serverid)
        devuelve.extend(server_module.find_videos(data))
    except ImportError:
        logger.info("No existe conector para #"+serverid+"#")
    except:
        logger.info("Error en el conector #"+serverid+"#")
        import traceback
        logger.info(traceback.format_exc())
    return devuelve
def findvideos(data):
    """Run every enabled server connector over *data*.

    Returns the concatenated list of (title, url, server) matches.
    """
    logger.info("pelisalacarta.core.servertools findvideos")
    devuelve = []
    # Run find_videos of every enabled server (removed the unused
    # `encontrados` set the original declared but never read)
    server_list = get_servers_list()
    for serverid in server_list:
        try:
            # "Plex compatible" import (no exec)
            servers_module = __import__("servers."+serverid)
            server_module = getattr(servers_module, serverid)
            devuelve.extend( server_module.find_videos(data) )
        except ImportError:
            logger.info("No existe conector para #"+serverid+"#")
        except:
            logger.info("Error en el conector #"+serverid+"#")
            import traceback
            logger.info(traceback.format_exc())
    return devuelve
def get_video_urls(server, url):
    """Return the playable URLs that *server* resolves for *url*.

    Thin wrapper over resolve_video_urls_for_playing that discards the
    success flag and the reason message.
    """
    video_urls, puede, motivo = resolve_video_urls_for_playing(server, url)
    return video_urls
def get_channel_module(channel_name):
    """Import and return the channel module named *channel_name*."""
    package = __import__("channels." + channel_name)
    return getattr(package, channel_name)
def get_server_from_url(url):
    """Return the id of the first server that recognises *url*.

    Falls back to "directo" when no connector matches.
    """
    matches = findvideos(url)
    if matches:
        return matches[0][2]
    return "directo"
def resolve_video_urls_for_playing(server,url,video_password="",muestra_dialogo=False):
    """Resolve the final playable URLs for *url* hosted on *server*.

    Returns (video_urls, ok, reason): ok is False when the video does not
    exist or no free/premium option produced a link; reason explains why.
    When muestra_dialogo is True a progress dialog is shown via platformtools.
    """
    logger.info("pelisalacarta.core.servertools resolve_video_urls_for_playing, server="+server+", url="+url)
    video_urls = []
    torrent = False
    server = server.lower()
    # "directo"/"local" urls are already playable, no connector lookup needed
    if server=="directo" or server=="local":
        logger.info("pelisalacarta.core.servertools server=directo, la url es la buena")
        try:
            import urlparse
            parsed_url = urlparse.urlparse(url)
            logger.info("parsed_url="+str(parsed_url))
            # Last 4 chars of the path are used as the displayed extension
            extension = parsed_url.path[-4:]
        except:
            extension = url[-4:]
        video_urls = [[ "%s [%s]" % (extension,server) , url ]]
        return video_urls,True,""
    # Otherwise work out the video URLs through the server connector
    else:
        # Load the connector
        try:
            # Progress dialog
            if muestra_dialogo:
                from platformcode import platformtools
                progreso = platformtools.dialog_progress( "pelisalacarta" , "Conectando con "+server)
            server_parameters = get_server_parameters(server)
            # Count the available options (free + configured premium accounts)
            # to compute the progress percentage
            opciones = []
            if server_parameters["free"] == "true":
                opciones.append("free")
            opciones.extend([premium for premium in server_parameters["premium"] if config.get_setting(premium+"premium")=="true"])
            logger.info("pelisalacarta.core.servertools opciones disponibles para " + server + ": " + str(len(opciones)) + " "+str(opciones))
            # "Plex compatible" import instead of exec
            servers_module = __import__("servers."+server)
            server_connector = getattr(servers_module,server)
            logger.info("pelisalacarta.core.servertools servidor de "+server+" importado")
            # If the connector can check that the video exists, do it now
            if hasattr(server_connector, 'test_video_exists'):
                logger.info("pelisalacarta.core.servertools invocando a "+server+".test_video_exists")
                puedes,motivo = server_connector.test_video_exists( page_url=url )
                # If the connector says the video is gone, stop here
                if not puedes:
                    logger.info("pelisalacarta.core.servertools test_video_exists dice que el video no existe")
                    if muestra_dialogo: progreso.close()
                    return video_urls,puedes,motivo
                else:
                    logger.info("pelisalacarta.core.servertools test_video_exists dice que el video SI existe")
            # Free links
            if server_parameters["free"]=="true":
                if muestra_dialogo:
                    progreso.update((100 / len(opciones)) * opciones.index("free") , "Conectando con "+server)
                logger.info("pelisalacarta.core.servertools invocando a "+server+".get_video_url")
                video_urls = server_connector.get_video_url( page_url=url , video_password=video_password )
                # No free links means the video does not exist
                if len(video_urls)==0:
                    if muestra_dialogo: progreso.close()
                    return video_urls,False,"No se puede encontrar el vídeo en "+server
            # Links for every configured premium option
            error_message = []
            for premium in server_parameters["premium"]:
                if config.get_setting(premium+"premium")=="true":
                    if muestra_dialogo:
                        progreso.update((100 / len(opciones)) * opciones.index(premium) , "Conectando con "+premium)
                    exec "from servers import "+premium+" as premium_conector"
                    if premium == "realdebrid":
                        debrid_urls = premium_conector.get_video_url( page_url=url , premium=True , video_password=video_password )
                        # A leading "REAL-DEBRID:" marks an error message, not a link
                        if not "REAL-DEBRID:" in debrid_urls[0][0]:
                            video_urls.extend(debrid_urls)
                        else:
                            error_message.append(debrid_urls[0][0])
                    elif premium == "alldebrid":
                        alldebrid_urls = premium_conector.get_video_url( page_url=url , premium=True , user=config.get_setting(premium+"user") , password=config.get_setting(premium+"password"), video_password=video_password )
                        if not "Alldebrid:" in alldebrid_urls[0][0]:
                            video_urls.extend(alldebrid_urls)
                        else:
                            error_message.append(alldebrid_urls[0][0])
                    else:
                        video_urls.extend(premium_conector.get_video_url( page_url=url , premium=True , user=config.get_setting(premium+"user") , password=config.get_setting(premium+"password"), video_password=video_password ))
            if not video_urls and error_message:
                return video_urls, False, " || ".join(error_message)
            if muestra_dialogo:
                progreso.update( 100 , "Proceso finalizado")
            # Close the progress dialog
            if muestra_dialogo: progreso.close()
            # Reaching this point with no link means the video cannot be played
            if len(video_urls)==0:
                # Why not?
                # 1) It does not exist -> already handled above
                # 2) None of the compatible premium accounts is configured
                # List the accounts that support this server
                listapremium = []
                for premium in server_parameters["premium"]:
                    listapremium.append(get_server_parameters(premium)["name"])
                return video_urls,False,"Para ver un vídeo en "+server+" necesitas<br/>una cuenta en "+" o ".join(listapremium)
        except:
            if muestra_dialogo: progreso.close()
            import traceback
            logger.info(traceback.format_exc())
            return video_urls,False,"Se ha producido un error en<br/>el conector con "+server
    return video_urls,True,""
def is_server_enabled(server):
    """Tell whether *server* should be offered to the user."""
    try:
        params = get_server_parameters(server)
        if params["active"] != "true":
            return False
        # When premium-only servers are not hidden, being active is enough
        if not config.get_setting("hidepremium") == "true":
            return True
        # Otherwise the server must actually be usable: free, or at least
        # one of its premium accounts is configured
        if params["free"] == "true":
            return True
        usable_premium = [p for p in params["premium"]
                          if config.get_setting(p + "premium") == "true"]
        if usable_premium:
            return True
        return False
    except:
        import traceback
        logger.info(traceback.format_exc())
        return False
def get_server_parameters(server):
    """Load servers/<server>.xml and return its "server" section.

    The "premium" entry is normalised to a list of server ids; any error
    yields an empty dict.
    """
    # Accept names carrying an extension ("gvideo.xml"): keep the part
    # before the first dot
    server = scrapertools.find_single_match(server, '([^\.]+)')
    try:
        parsed = xml2dict(os.path.join(config.get_runtime_path(), "servers", server + ".xml"))["server"]
        premium = parsed["premium"]
        if type(premium) == dict:
            premium = premium["value"]
        if premium == "":
            premium = []
        if type(premium) == str and not premium == "":
            premium = [premium]
        parsed["premium"] = premium
        return parsed
    except:
        logger.info("Error al cargar el servidor: " + server)
        import traceback
        logger.info(traceback.format_exc())
        return {}
def get_servers_list():
    """Return {server_id: parameters} for every enabled server xml file."""
    logger.info("pelisalacarta.core.servertools get_servers_list")
    servers_path = os.path.join(config.get_runtime_path(), "servers")
    enabled = {}
    for filename in os.listdir(servers_path):
        if not filename.endswith(".xml"):
            continue
        if not is_server_enabled(filename):
            continue
        params = get_server_parameters(filename)
        enabled[params["id"]] = params
    return enabled
def xml2dict(file = None, xmldata = None):
    """Parse a simple xml string (or file) into nested dicts.

    Repeated sibling tags are collected into a list.  Raises when neither
    *file* nor *xmldata* is given, or when *file* does not exist.
    """
    import re, sys, os
    # Self-reference for recursion that survives module aliasing
    parse = globals().get(sys._getframe().f_code.co_name)
    if xmldata == None and file == None: raise Exception("No hay nada que convertir!")
    if xmldata == None:
        if not os.path.exists(file): raise Exception("El archivo no existe!")
        xmldata = open(file, "rb").read()
    matches = re.compile("<(?P<tag>[^>]+)>[\n]*[\s]*[\t]*(?P<value>.*?)[\n]*[\s]*[\t]*<\/(?P=tag)\s*>",re.DOTALL).findall(xmldata)
    return_dict = {}
    for tag, value in matches:
        # Nested elements are parsed recursively
        if "<" and "</" in value:  # NOTE: effectively tests only '"</" in value'
            if tag in return_dict:
                if type(return_dict[tag])== list:
                    return_dict[tag].append(parse(xmldata=value))
                else:
                    # BUGFIX: promote the existing single entry to a list.
                    # The original referenced undefined names (dct/tags/x)
                    # and raised NameError on repeated nested tags.
                    return_dict[tag] = [return_dict[tag]]
                    return_dict[tag].append(parse(xmldata=value))
            else:
                return_dict[tag] = parse(xmldata=value)
        else:
            if tag in return_dict:
                if type(return_dict[tag])== list:
                    return_dict[tag].append(value)
                else:
                    return_dict[tag] = [return_dict[tag]]
                    return_dict[tag].append(value)
            else:
                return_dict[tag] = value
    return return_dict
| ChopChopKodi/pelisalacarta | python/main-classic/core/servertools.py | Python | gpl-3.0 | 15,219 |
# -*- coding: UTF-8 -*-
import os
import json
import logging
import time
import copy
import tornado
from sqlalchemy.orm import query,aliased
from sqlalchemy import and_,or_,desc,asc
from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound
import async
import static_config
import database
import config
import util
import callback_lib
import mm
from controller import BaseHandler
from controller import shell_lib
from model.instance import Instance
from model.host_group_var import Host,Group,GroupHost,HostVar,GroupVar
from model.task import Task
from model.services import Service
from model.callback import CallBack
app_log = logging.getLogger("tornado.application")
class AdminHandler(BaseHandler):
    """Renders the admin console page for administrator accounts."""

    @tornado.web.authenticated
    def get(self):
        user = self.get_current_user()
        # Only type 0 (administrator) accounts may open this page
        if user['type'] != 0:
            self.ret("error", "this user is not admin")
            return
        self.render("admin.html")
class AdminBackHandler(BaseHandler):
@tornado.web.authenticated
def get(self, path):
    """Dispatch GET /admin/<path> to the handler method named *path*."""
    user = self.get_current_user()
    # Admin-only endpoint
    if user['type'] != 0:
        self.ret("error", "this user is not admin")
        return
    if hasattr(self, path):
        fun = getattr(self, path)
        # NOTE: a non-callable attribute falls through without a response
        if callable(fun):
            fun()
    else:
        self.ret("error", "unsupport action")
@tornado.web.authenticated
def post(self, path):
    # POST behaves exactly like GET: dispatch to the method named *path*.
    self.get(path)
def user(self):
    """Return the current user together with the admin menu entries."""
    current = self.get_current_user()
    self.ret("ok", "", {"user": current, "menus": static_config.adminmenus})
# Return the static information of every service
def services_info(self):
    """Return the service catalogue annotated with active state, web urls
    and special per-service action rules."""
    session = database.getSession()
    active = []
    for service in session.query(Service):
        if service.status == Service.STATUS_ACTIVE :
            active.append(service.service)
    # Work on a deep copy so the shared static config is never mutated
    services_copy = copy.deepcopy(static_config.services)
    for temp in services_copy:
        if temp['name'] in active:
            temp['active'] = True;
        else:
            temp['active'] = False;
        # Compute the web urls of the service: one per (role instance, port)
        if temp.has_key('web') :
            urls = []
            for web in temp['web'] :
                port = ""
                for gv in session.query(GroupVar).filter(GroupVar.name == web['port']) :
                    port = gv.value
                for instance in session.query(Instance).filter(Instance.role == web['role']) :
                    url = {"role":web['role'],"host":instance.host,"port":port}
                    urls.append(url)
            temp['urls'] = urls;
        else:
            temp['urls'] = []
        # Special rules
        # dfs_namenode_support_allow_format decides whether the "format"
        # action is exposed for hdfs
        if temp['name'] == 'hdfs' :
            should_format = database.get_service_conf(session,'hdfs','dfs_namenode_support_allow_format')
            if should_format != None and should_format != 'true' :
                wi = 0
                find = False;
                for action in temp['actions']:
                    if action['name'] == 'format':
                        find = True
                        break;
                    wi = wi +1
                if find:
                    del temp['actions'][wi]
    ret = { "services" : services_copy , "role_check_map" : static_config.role_check_map }
    session.close()
    self.ret("ok", "", ret);
def service_info(self):
    """Return instance list and per-role health summary of one service."""
    service = self.get_argument("service")
    payload = {
        "name": service,
        "instances": self.get_instance(service),
        "summary": self.get_service_summary(service),
    }
    self.ret("ok", "", payload)
def get_instance(self, service):
    """Return the formatted instance rows belonging to *service*."""
    session = database.getSession()
    formatted = [inst.format()
                 for inst in session.query(Instance).filter(Instance.service == service)]
    session.close()
    return formatted
def get_service_summary(self, service):
    """Count instances per (role, health) for *service*."""
    session = database.getSession()
    summary = {}
    # Seed every role of the service, even roles without instances
    for role in static_config.get_role_from_service(service):
        summary[role] = {}
    for instance in session.query(Instance).filter(Instance.service == service):
        info = instance.format()
        bucket = summary[info["role"]]
        if info["health"] not in bucket:
            bucket[info["health"]] = 0
        bucket[info["health"]] += 1
    session.close()
    return summary
# Return every group and every ready host
def group_host_list(self):
    """List all groups and all hosts whose status is READY."""
    session = database.getSession()
    ret = {}
    ret["groups"] = [{"name": g.group} for g in session.query(Group)]
    ready_hosts = session.query(Host).filter(Host.status == Host.STATUS_READY)
    ret["hosts"] = [{"name": h.hostname} for h in ready_hosts]
    session.close()
    self.ret("ok", "", ret)
# Return configuration variables; handles both group and host variables.
# Host variables are not filtered by host name.
def conf_var(self):
    """Return group vars of (service, group), or all host vars of service."""
    service = self.get_argument("service")
    group = self.get_argument("group", "all")
    showType = self.get_argument("showType")
    session = database.getSession()
    if showType == "group":
        query = session.query(GroupVar).filter(
            and_(GroupVar.service == service, GroupVar.group == group))
    else:
        query = session.query(HostVar).filter(HostVar.service == service)
    rows = [var.format() for var in query]
    session.close()
    self.ret("ok", "", {"conf": rows})
# Return the fair-scheduler related configuration
def fair_scheduler_config(self):
    """Collect fair-scheduler queues, memory settings and cluster capacity.

    BUGFIX: mapreduce_reduce_memory_mb previously re-read the
    yarn_app_mapreduce_am_resource_mb setting (copy-paste error); the
    session is now closed and a stray no-op session.query() was removed.
    """
    session = database.getSession()
    # Configured queues and memory settings
    queues = database.get_service_conf(session, "yarn", "fair_scheduler_queues")
    yarn_app_mapreduce_am_resource_mb = database.get_service_conf(session, "yarn", "yarn_app_mapreduce_am_resource_mb")
    mapreduce_map_memory_mb = database.get_service_conf(session, "yarn", "mapreduce_map_memory_mb")
    mapreduce_reduce_memory_mb = database.get_service_conf(session, "yarn", "mapreduce_reduce_memory_mb")
    # Count nodemanagers and collect their hosts
    nodes = 0
    node = []
    for instance in session.query(Instance).filter(Instance.role == "nodemanager"):
        nodes = nodes + 1
        node.append(instance.host)
    node_memory = database.get_conf_from_host(session, node, "yarn", "yarn_nm_resource_memory_mb")
    # Total memory over all nodemanagers
    total_memory = 0
    for (nodename, memory) in node_memory.items():
        total_memory = total_memory + int(memory)
    session.close()
    self.ret("ok", "", {"fair_scheduler_queues": queues,
                        "yarn_app_mapreduce_am_resource_mb": yarn_app_mapreduce_am_resource_mb,
                        "mapreduce_map_memory_mb": mapreduce_map_memory_mb,
                        "mapreduce_reduce_memory_mb": mapreduce_reduce_memory_mb,
                        "total_memory": total_memory, "nodes": nodes, "node_memory": node_memory
                        })
# Save / modify / delete a group variable or a host variable
# TODO distinguish the first insert from an update
def save_conf_var(self):
    """Persist (or delete) one configuration variable from the request."""
    service = self.get_argument("service")
    showType = self.get_argument("showType")
    group = self.get_argument("group", "")
    host = self.get_argument("host", "")
    name = self.get_argument("name")
    value = self.get_argument("value")
    type = self.get_argument("type")
    text = self.get_argument("text", "")
    showdel = self.get_argument("del", "")
    self.save_var_todb(service, showType, group, host,
                       name, value, type, text, showdel)
    self.ret("ok", "", {})
def save_var_todb(self,service,showType,group,host,name,value,type,text,showdel=""):
    """Insert/update (via merge) or delete (showdel=="del") one variable.

    showType "group" targets GroupVar rows; anything else targets HostVar.
    """
    value = str(value)
    session = database.getSession()
    if showType=="group":
        groupVar = GroupVar(group,service,name,value,type,text)
        if showdel=="del":
            # Delete every matching group variable
            for groupVar in session.query(GroupVar).filter( and_( GroupVar.service == service , GroupVar.group == group , GroupVar.name == name )) :
                session.delete(groupVar)
            session.commit()
        else:
            # merge() inserts the row or updates the existing one
            session.merge(groupVar)
            session.commit()
    else:
        hostVar = HostVar(host,service,name,value,type,text)
        if showdel=="del":
            for hostVar in session.query(HostVar).filter( and_( HostVar.service == service , HostVar.host == host , HostVar.name == name )) :
                session.delete(hostVar)
            session.commit()
        else:
            session.merge(hostVar)
            session.commit()
    session.close()
# Submit an execution task.
# Instances receiving "start" are directly marked as started;
# instances receiving "stop" are directly marked as stopped.
# TODO add starting/stopping intermediate states and check them
#
def send_action(self):
    """Build and enqueue ansible task(s) for a service or its instances."""
    taskType = self.get_argument("taskType","ansible")
    service = self.get_argument("service")
    actionType = self.get_argument("actionType")
    instances = self.get_argument("instances","")
    taskName = self.get_argument("taskName")
    running_id = []
    session = database.getSession()
    # Before running the action, check that the role counts look sane;
    # failures only produce a warning attached to the response
    ret_msg = []
    # Role count check
    (check,warn_msg) = self.check_role_num_by_service(session, service, "It could make the task fail.")
    if not check:
        ret_msg += warn_msg
    if actionType=="service":
        # Action targeting the whole service
        self.update_with_service_action(session,service,taskName)
        taskid = database.build_task(session,taskType,service,"","",taskName)
        running_id.append(taskid)
    elif actionType=="instance":
        for instance in instances.split(","):
            (host,role) = Instance.split_instance_name(instance)
            if host != None and role != None :
                self.update_with_instance_action(session,service,host,role,taskName)
                taskid = database.build_task(session,taskType,service,host,role,taskName)
                running_id.append(taskid)
            else:
                self.ret("error","split instance name %s error" % instance)
                return
    else:
        self.ret("error", "unsport actionType")
        return
    session.commit()
    session.close()
    # Publish the task ids to the message queue
    msg = ','.join([str(rid) for rid in running_id])
    if not mm.send(msg):
        ret_msg.append("send message to worker error")
    ret_msg_str = ""
    if len(ret_msg) != 0:
        ret_msg_str = ",".join(ret_msg)
    self.ret("ok", ret_msg_str, {"runningid": running_id})
# Send a kill command for one task
def kill_task(self):
    """Ask the worker, through MQ, to kill the given task."""
    taskid = self.get_argument("taskid")
    if mm.kill_task(int(taskid)):
        self.ret("ok", "")
    else:
        self.ret("error", "killing task failed")
# Retry a failed task
def rerun_task(self):
    """Clone a finished task (and its callbacks) and resubmit it to MQ.

    BUGFIX: the session is now closed on the NoResultFound early return.
    """
    taskid = self.get_argument("taskid")
    session = database.getSession()
    try:
        task = session.query(Task).filter(Task.id == taskid).one()
    except NoResultFound:
        session.close()
        return self.ret("error", "Cant't find the task with id: %s" % taskid)
    newTaskid = database.build_task(session, task.taskType, task.service, task.host, task.role, task.task)
    # Re-attach the callbacks of the original task to the clone
    for cb in session.query(CallBack).filter(CallBack.taskid == taskid):
        callback_lib.add_callback(session, newTaskid, cb.func, json.loads(cb.params))
    # Publish the new task id to the message queue
    retMsg = ""
    msg = str(newTaskid)
    if not mm.send(msg):
        retMsg = "send message to worker error"
    app_log.info("send msg to mq")
    session.close()
    self.ret("ok", retMsg, {"taskid": newTaskid})
def update_with_service_action(self,session,service,taskName):
    '''
    Mirror a service-level action into instance state.

    start/stop update every instance's status and uptime; the hive "aux"
    action additionally rebuilds the hive_aux_jars_path group variable
    from the files found in the aux upload directory.
    '''
    if taskName == "start" :
        session.query(Instance).filter(Instance.service==service) \
            .update({Instance.status:Instance.STATUS_START,\
            Instance.uptime:int(time.time())})
        session.commit();
    elif taskName == "stop" :
        session.query(Instance).filter(Instance.service==service) \
            .update({Instance.status:Instance.STATUS_STOP,
            Instance.uptime:0})
        session.commit();
    if taskName == "aux" and service == "hive" :
        upload_path = config.aux_upload_dir
        aux_list = []
        for file in os.listdir(upload_path):
            # Skip hidden files
            if file.startswith('.'):
                continue
            file_path = os.path.join(upload_path,file)
            if os.path.isfile(file_path):
                aux_list.append("file://" + file_path)
        session.query(GroupVar).filter( and_((GroupVar.service==service),(GroupVar.name=="hive_aux_jars_path")) ) \
            .update({GroupVar.value : ','.join(aux_list) })
        session.commit();
def update_with_instance_action(self, session, service, host, role, taskName):
    """Mirror a start/stop instance task into the instance row's status."""
    target = session.query(Instance).filter(and_(
        Instance.service == service,
        Instance.host == host, Instance.role == role))
    if taskName == "start":
        target.update({Instance.status: Instance.STATUS_START,
                       Instance.uptime: int(time.time())})
        session.commit()
    elif taskName == "stop":
        target.update({Instance.status: Instance.STATUS_STOP,
                       Instance.uptime: 0})
        session.commit()
#添加一个机器
#端口 用户名 密码 等都是空的 在异步连接的时候会补充这个
def add_host(self):
hosts = self.get_argument("hosts")
port = self.get_argument("port","")
user = self.get_argument("user","")
passwd = self.get_argument("passwd","")
sudopasswd = self.get_argument("sudopasswd","")
host_array = hosts.split(",")
(check,msg) = self.check_add_host(host_array)
if not check:
self.ret("error", msg)
return
id = async.async_setup()
async.async_run(async.add_host,(id,host_array,(user,port,passwd,sudopasswd)))
self.ret("ok", "", {"runningId": [id]})
def check_add_host(self, hostArray):
    """Validate new host names: not an IP, not already registered.

    Returns (ok, message).  BUGFIX: the session is now always closed,
    including on the early error returns.
    """
    session = database.getSession()
    try:
        for host in hostArray:
            if util.look_like_ip(host):
                return (False, host + " look like ip, please check")
            if session.query(Host).filter(Host.hostname == host).count() != 0:
                return (False, host + " is already in host table")
    finally:
        session.close()
    return (True, "")
# Query the progress of a list of task ids
def query_progress(self):
    """Aggregate progress over sync tasks (negative ids) and worker tasks.

    BUGFIX: an empty id list previously raised ZeroDivisionError.
    """
    idList = self.get_argument("id")
    ids = json.loads(idList)
    if not ids:
        self.ret("ok", "", {"id": ids, "progress": 100, "progressMsg": ""})
        return
    progress = 0
    progress_msg = ""
    session = database.getSession()
    for nid in ids:
        (pg, msg) = self.query_id_process(session, nid)
        # Negative ids are in-process async tasks, positive ids worker tasks
        if nid < 0:
            progress_msg += "SyncTask taskid: (%d) %s \n" % (-nid, msg)
        else:
            progress_msg += "Task taskid:(%d) %s \n" % (nid, msg)
        progress += int(pg)
    session.close()
    progress /= len(ids)
    self.ret("ok", "", {"id": ids, "progress": progress, "progressMsg": progress_msg})
def query_id_process(self,session,nid):
if nid <0 :
#同步任务
return (async.async_get(nid,"progress","0"),async.async_pop(nid,"progressMsg",""))
else:
#worker 任务
queryTask = session.query(Task).filter(Task.id==nid)
if queryTask.count() == 0:
return (0,str(id)+" isn't exist")
else:
nowTask = queryTask[0]
return (nowTask.getProcess(),nowTask.msg)
# Return the host list
def hosts(self):
    """Return every host row keyed by hostname."""
    session = database.getSession()
    by_name = {}
    for host in session.query(Host):
        by_name[host.hostname] = {"info": host.format()}
    session.close()
    self.ret("ok", "", {"hosts": by_name})
def set_rack(self):
    """Assign a rack label to a comma separated list of hosts."""
    hosts = self.get_argument("hosts")
    rack = self.get_argument("rack")
    session = database.getSession()
    session.query(Host) \
        .filter(Host.hostname.in_(hosts.split(","))) \
        .update({Host.rack: rack}, synchronize_session="fetch")
    session.commit()
    session.close()
    self.ret("ok", "")
def del_host(self):
    """Delete hosts and their group memberships.

    BUGFIX: the session is now closed when the validation check fails.
    """
    hosts = self.get_argument("hosts")
    session = database.getSession()
    (check, msg) = self.check_del_host(session, hosts)
    if not check:
        session.close()
        self.ret("error", msg)
        return
    # Delete the host rows
    for host in session.query(Host).filter(Host.hostname.in_(hosts.split(","))):
        session.delete(host)
    # Delete their group membership rows
    for gh in session.query(GroupHost).filter(GroupHost.hostname.in_(hosts.split(","))):
        session.delete(gh)
    session.commit()
    session.close()
    self.ret("ok", "")
def check_del_host(self, session, hosts):
    """Refuse deletion while any instance still lives on the hosts."""
    count = session.query(Instance).filter(
        Instance.host.in_(hosts.split(","))).count()
    if count != 0:
        return (False, "some host find in instance.please remove them first")
    return (True, "" + str(count))
# Query the mapping between hosts and roles
def host_role(self):
    """Return active-service roles, the host-to-roles map, and the
    instances currently being set up or removed."""
    session= database.getSession()
    active=[]
    for service in session.query(Service):
        if service.status == Service.STATUS_ACTIVE :
            active.append(service.service)
    roles = {};
    for service in static_config.services:
        if service["name"] in active:
            roles[service["name"]] = service["role"]
    hostroles = {}
    doing=[]
    # Seed the map with every READY host
    hosts = session.query(Host).filter(Host.status == Host.STATUS_READY)
    for host in hosts:
        hostname = host.hostname;
        hostroles[hostname]={};
        hostroles[hostname]['role']=[]
    instances = session.query(Instance)
    for instance in instances:
        role = instance.role
        host = instance.host
        # NOTE(review): raises KeyError if an instance lives on a host that
        # is not READY (missing from hostroles) — confirm hosts that carry
        # instances are always READY.
        hostroles[host]['role'].append(role)
        if instance.status == Instance.STATUS_SETUP or instance.status == Instance.STATUS_REMOVING :
            doing.append({"host":host,"role":role,"status":instance.status})
    session.close()
    self.ret("ok", "",{"roles":roles,"hostroles":hostroles,"doing":doing})
# Return the instances with an operation in flight
def doing(self):
    """List instances whose status is SETUP or REMOVING."""
    session = database.getSession()
    busy = []
    for instance in session.query(Instance):
        if instance.status == Instance.STATUS_SETUP or instance.status == Instance.STATUS_REMOVING:
            busy.append({"host": instance.host,
                         "role": instance.role,
                         "status": instance.status})
    session.close()
    self.ret("ok", "", {"doing": busy})
# Enable a service and set up its initial instances
def add_service(self):
    """Activate *service*, persist its variables, then add its instances."""
    service = self.get_argument("service")
    add_args = self.get_argument("add")
    var_args = self.get_argument("vars", "[]")
    add_instance = json.loads(add_args)
    # Persist the variables the service needs
    for var in json.loads(var_args):
        self.save_var_todb(var['service'], var['showType'], var['group'],
                           var['host'], var['name'], var['value'], var['type'],
                           var['text'])
    # Mark the service as active
    session = database.getSession()
    session.merge(Service(service, Service.STATUS_ACTIVE))
    session.commit()
    session.close()
    # Delegate instance creation (which also sends the response)
    self.inner_add_del_instance(add_instance, [])
def can_del_service(self):
    """Report whether remaining instances block deletion of a service."""
    service = self.get_argument("service")
    session = database.getSession()
    remaining = []
    for instance in session.query(Instance).filter(Instance.service == service):
        remaining.append(instance.get_instance_name(instance.host, instance.role))
    session.close()
    if remaining:
        self.ret("error","some instance is exist please remove then first. instances:"+(",".join(remaining)))
    else:
        self.ret("ok", "")
def del_service(self):
    """Deactivate a service by resetting its status to INIT."""
    service = self.get_argument("service")
    session = database.getSession()
    session.merge(Service(service, Service.STATUS_INIT))
    session.commit()
    session.close()
    self.ret("ok", "")
# Add / delete instances.
# Deletion submits a task and polls until it finishes; only then is the
# row actually removed.
def add_del_instance(self):
    """Parse add/del/vars arguments and delegate to inner_add_del_instance."""
    add_args = self.get_argument("add", "[]")
    del_args = self.get_argument("del", "[]")
    var_args = self.get_argument("vars", "[]")
    # Persist the required variables first
    for var in json.loads(var_args):
        self.save_var_todb(var['service'], var['showType'], var['group'],
                           var['host'], var['name'], var['value'], var['type'],
                           var['text'])
    self.inner_add_del_instance(json.loads(add_args), json.loads(del_args))
def inner_add_del_instance(self, add_instance, del_instance):
    """Validate, submit setup/remove tasks, register callbacks, notify MQ.

    BUGFIX: the session is now closed when validation fails.
    """
    session = database.getSession()
    ret_msg = []
    (check, msg) = self.check_add_del_instance(session, add_instance, del_instance)
    if not check:
        session.close()
        self.ret("error", msg)
        return
    else:
        # A truthy msg on success is a warning (list or string) to forward
        if msg != "" and isinstance(msg, list):
            ret_msg += msg
        elif isinstance(msg, str):
            ret_msg.append(msg)
    add_running_id = self.add_instance(add_instance)
    del_running_id = self.del_instance(del_instance)
    # Post-task callbacks finish the state transition of each instance
    for taskid in add_running_id:
        callback_lib.add_callback(session, taskid, "dealAddInstance")
    for taskid in del_running_id:
        callback_lib.add_callback(session, taskid, "dealDelInstance")
    session.close()
    # Publish the task ids to the message queue
    msg = ','.join([str(id) for id in (add_running_id + del_running_id)])
    if not mm.send(msg):
        ret_msg.append("send message to worker error")
    self.ret("ok", '\n'.join(ret_msg), {"addRunningId": add_running_id, "delRunningId": del_running_id})
def add_instance(self, addInstance):
    """Insert SETUP instance rows and submit one setup task for each."""
    session = database.getSession()
    # Record every new instance in SETUP state
    for spec in addInstance:
        temp_service = static_config.get_service_from_role(spec["role"])
        row = Instance(temp_service, spec["host"], spec["role"])
        row.status = Instance.STATUS_SETUP
        session.merge(row)
    session.commit()
    # Submit one ansible "setup" task per instance
    running_id = []
    for spec in addInstance:
        temp_service = static_config.get_service_from_role(spec["role"])
        taskid = database.build_task(session, "ansible", temp_service,
                                     spec["host"], spec["role"], "setup")
        running_id.append(taskid)
    session.commit()
    session.close()
    return running_id
def del_instance(self, delInstance):
    """Mark instances REMOVING and submit one remove task for each."""
    session = database.getSession()
    # Flag every instance as being removed
    for spec in delInstance:
        session.query(Instance).filter(
            and_(Instance.host == spec["host"], Instance.role == spec["role"])) \
            .update({Instance.status: Instance.STATUS_REMOVING})
    session.commit()
    # Submit one ansible "remove" task per instance
    running_id = []
    for spec in delInstance:
        tempService = static_config.get_service_from_role(spec["role"])
        new_taskid = database.build_task(session, "ansible", tempService,
                                         spec["host"], spec["role"], "remove")
        running_id.append(new_taskid)
    session.commit()
    session.close()
    return running_id
def check_add_del_instance(self, session, add_instance, del_instance):
    """Validate an add/del request and the resulting per-role counts.

    Returns (ok, message-or-warning-list).  BUGFIX: the empty-request case
    used to call self.ret and then return None, which crashed the caller's
    tuple unpacking; it now returns (False, msg) so the caller reports the
    error exactly once.
    """
    if len(add_instance) == 0 and len(del_instance) == 0:
        return (False, "no instance need to add or del")
    # Current number of instances per role
    role_num_query = session.query(Instance.role, func.count(Instance.id)).group_by(Instance.role)
    role_num = {}
    for record in role_num_query:
        role_num[record[0]] = record[1]
    add_del_num = {}
    for add_inst in add_instance:
        num = session.query(Instance).filter(and_(Instance.host == add_inst["host"], \
            Instance.role == add_inst["role"])).count()
        if num == 1:
            return (False, "instance is exist (%s,%s) " % (add_inst["host"], add_inst["role"]))
        if add_inst["role"] in add_del_num:
            add_del_num[add_inst["role"]] = add_del_num[add_inst["role"]] + 1
        else:
            add_del_num[add_inst["role"]] = 1
    for del_inst in del_instance:
        query = session.query(Instance).filter(and_(Instance.host == del_inst["host"], \
            Instance.role == del_inst["role"]))
        num = query.count()
        if num == 0 or num > 1:
            return (False, "instance is not exist ( %s,%s) " % (del_inst["host"], del_inst["role"]))
        else:
            for instance in query:
                # Only stopped instances may be removed
                if instance.status != "stop":
                    return (False, "instance's status is not stop (%s,%s) " % (del_inst["host"], del_inst["role"]))
        if del_inst["role"] in add_del_num:
            add_del_num[del_inst["role"]] = add_del_num[del_inst["role"]] - 1
        else:
            add_del_num[del_inst["role"]] = -1
    # Merge the current counts with the delta and validate each role count
    warn_msg = []
    for (role, new_num) in add_del_num.items():
        old_num = 0
        if role in role_num:
            old_num = role_num[role]
        (check, msg) = self.check_role_num(role, old_num + new_num)
        if not check:
            warn_msg.append(msg)
    return (True, warn_msg)
def check_role_num_by_service(self, session, service, add_more_msg=""):
#角色数量检查
role_num_query = session.query(Instance.role,func.count(Instance.id)).group_by(Instance.role)
checkResult = True
warnMsg = []
for record in role_num_query:
(check,msg) = self.check_role_num( record[0], record[1], add_more_msg )
if not check:
checkResult = False
warnMsg.append(msg)
return ( checkResult, warnMsg )
def check_role_num(self, role, new_num, add_more_msg=""):
"""
检查这个角色的数量是不是符合要求
"""
if static_config.role_check_map.has_key( role ) :
temp = static_config.role_check_map[role]
if temp.has_key("min") and new_num < temp["min"] :
return (False, "role %s 's number %d shoule more than or equal %d.%s"
% ( role, new_num, temp["min"], add_more_msg) )
if temp.has_key("max") and new_num > temp["max"] :
return (False, "role %s 's number %d shoule less than or equal %d.%s"
% ( role, new_num, temp["max"], add_more_msg) )
if temp.has_key("equal") and new_num != temp["equal"] :
return (False, "role %s 's number %d shoule equal to %d.%s"
% ( role, new_num, temp["equal"], add_more_msg) )
return (True,"")
#查询任务
#dir=desc&limit=50&offset=0&orderby=id&search=aaa
def tasks(self):
search = self.get_argument("search","")
orderby = self.get_argument("orderby","")
dir = self.get_argument("dir","")
offset = self.get_argument("offset","")
limit = self.get_argument("limit","")
session = database.getSession()
query = session.query(Task)
if search != "" :
search='%'+search+'%'
query = query.filter(or_(Task.id.like(search),Task.taskType.like(search),Task.service.like(search), \
Task.host.like(search),Task.role.like(search),Task.task.like(search), \
Task.status.like(search), Task.result.like(search)))
total_task = query.count();
if dir=="asc":
query = query.order_by(asc(orderby))[int(offset):int(offset)+int(limit)]
else :
query = query.order_by(desc(orderby))[int(offset):int(offset)+int(limit)]
task_list=[]
for task in query:
task_list.append(task.format())
session.close()
self.ret("ok", "", {"tasks":task_list,"totalTask":total_task})
#查询单个任务的详细
def task_detail(self):
taskid = self.get_argument("taskid")
session = database.getSession()
task = session.query(Task).filter(Task.id==taskid).first()
tf = task.format()
tf['msg'] = task.msg
session.close()
self.ret("ok", "", {"task":tf})
#查询机器和组的对应关系
def host_group(self):
session = database.getSession()
groups = {}
hostgroups = {}
for host in session.query(Host).filter(Host.status == Host.STATUS_READY ):
hostgroups[host.hostname]={}
hostgroups[host.hostname]["group"]=['all']
for group in session.query(Group):
groups[group.group]=group.text
for gh in session.query(GroupHost):
hostgroups[gh.hostname]["group"].append(gh.group)
session.close()
self.ret("ok","",{"groups":groups,"hostgroups":hostgroups})
#保存组
def save_group(self):
name = self.get_argument("group")
text = self.get_argument("text","")
toDel = self.get_argument("del","")
nowGroup = Group(name,text)
session = database.getSession()
if toDel=="del":
for group in session.query(Group).filter(Group.group==name):
session.delete(group)
session.commit()
else:
session.merge(nowGroup)
session.commit()
session.close()
self.ret("ok","")
#修改机器和分组的关系
def setup_group(self):
add_args = self.get_argument("add")
del_args = self.get_argument("del")
add_groups = json.loads(add_args)
del_groups = json.loads(del_args)
session = database.getSession()
for addGroup in add_groups:
gh = GroupHost(addGroup['group'],addGroup['host'])
session.merge(gh)
session.commit
for delGroup in del_groups:
query = session.query(GroupHost).filter(and_(GroupHost.hostname==delGroup['host'],GroupHost.group==delGroup['group']))
for gh in query:
session.delete(gh)
session.commit()
session.close()
self.ret("ok","")
#******************************************************
#获取所有的template文件
def template_list(self):
templates={}
for dir in os.listdir(config.template_dir):
if dir.startswith('.') :
continue;
dirPath = os.path.join(config.template_dir,dir)
if os.path.exists(dirPath) and os.path.isdir(dirPath):
templates[dir] = []
for file in os.listdir(dirPath):
filePath = os.path.join(dirPath,file)
app_log.info(filePath)
if os.path.exists(filePath) and os.path.isfile(filePath):
file = file.replace(".j2","")
templates[dir].append(file);
templates[dir].sort()
self.ret("ok","",{"templates":templates})
#获取指定的文件内容
def template_file(self):
dir = self.get_argument("dir")
file = self.get_argument("file")
file = file+".j2"
filePath = os.path.join(config.template_dir,dir,file)
if os.path.exists(filePath) and os.path.isfile(filePath):
content = open(filePath, "r").read()
self.ret("ok","",{"content":content,"row":self.get_content_row(content)})
else:
self.ret("error","file not exist")
def template_build_file(self):
'''
获取生成的配置文件
'''
dir = self.get_argument("dir")
file = self.get_argument("file")
file = file+".j2"
host = self.get_argument("host")
(content,output) = shell_lib.get_template_file(host,dir,file);
if content != "":
self.ret("ok","",{"content":content,"row":self.get_content_row(content) })
else:
self.ret("error",output)
def template_download_file(self):
'''
生成整个服务的配置文件
'''
dir = self.get_argument("dir")
host = self.get_argument("host")
(url,output) = shell_lib.download_template_file(host,dir);
if url != None and url != "":
self.ret("ok","",{"url" : url })
else:
self.ret("error",output)
def get_content_row(self,content):
count = 0 ;
for c in content:
if c == "\n" :
count = count+1
return count;
def save_template_file(self):
dir = self.get_argument("dir")
file = self.get_argument("file")
file = file+".j2"
content = self.get_argument("content")
filePath = os.path.join(config.template_dir,dir,file)
fd = open(filePath,"w")
fd.write(content.encode('utf8'));
time.sleep(2)
self.ret("ok","")
#****************************************************************************************
#manual获取数据库的表
def manual_metadata(self):
table={}
models = database.get_all_models()
temp = {}
for model in models:
temp = {}
temp['column']=[]
temp['primary']=[]
for col in model.__table__.columns:
if col.primary_key:
temp['primary'].append(col.name)
else:
temp['column'].append(col.name)
table[model.__tablename__]=temp
self.ret("ok","",{"table":table})
def manual_query(self):
sql = self.get_argument("sql")
session = database.getSession()
result = session.execute(sql)
data = []
for record in result:
temp = [];
for value in record:
temp.append(value)
data.append(temp);
session.close()
self.ret("ok","",{"column":result.keys(),"data":data})
#修改数据库 直接使用merge进行合并
def manual_execute(self):
sql = self.get_argument("sql")
session = database.getSession()
result = session.execute(sql)
session.commit()
session.flush()
session.close()
self.ret("ok","")
#以下是aux 相关的配置
def aux_get(self):
upload_path = config.aux_upload_dir
file_list = []
if not os.path.exists(upload_path) :
os.makedirs(upload_path)
for file in os.listdir(upload_path):
if file.startswith('.'):
continue
file_path = os.path.join(upload_path,file)
if os.path.isfile(file_path):
size = os.path.getsize(file_path)
file_list.append({"name":file,"size":size})
self.ret("ok","",{"files":file_list})
def aux_upload(self):
upload_path = config.aux_upload_dir
file_metas = self.request.files['file']
result = {}
for meta in file_metas:
filename = meta['filename']
filepath = os.path.join(upload_path,filename)
with open(filepath,'wb') as up:
up.write(meta['body'])
result[filename] = "ok"
self.ret("ok", "", {"result":result})
def aux_delete(self):
upload_path = config.aux_upload_dir
file_name = self.get_argument("filename")
file_path = os.path.join(upload_path, file_name)
try:
os.remove(file_path)
self.ret("ok","")
except:
self.ret("error","delete file %s error" % file_path)
| uhp/uhp | uhpweb/controller/admin.py | Python | gpl-2.0 | 39,163 |
# -*- coding: utf-8 -*-
#
# Aleph documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 2 16:22:48 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
    'autoapi.extension'
]
# Document Python Code
autoapi_dirs = ['../aleph']
autoapi_ignore = [
    '*tests/test_*',
    '*migrate*',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.md']
# Enable support for Markdown
#
# NOTE(review): `source_parsers` is deprecated in newer Sphinx/recommonmark
# releases -- confirm against the pinned Sphinx version before upgrading.
source_parsers = {'.md': CommonMarkParser}
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Aleph'
copyright = u'2016, aleph Contributors'
author = u'aleph Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.1'
# The full version, including alpha/beta/rc tags.
release = u'1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
import sphinx_rtd_theme
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'default'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Aleph v1.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Alephdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
# NOTE(review): the author literal below differs from the `author` variable
# defined above ('aleph Contributors') -- confirm this is intentional.
latex_documents = [
    (master_doc, 'Aleph.tex', u'Aleph Documentation',
     u'Friedrich Lindenberg', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, 	itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'aleph', u'Aleph Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Aleph', u'Aleph Documentation',
     author, 'Aleph', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# Better integration with markdown
def setup(app):
    """Hook recommonmark's AutoStructify transform into the Sphinx build."""
    options = {
        'enable_auto_doc_ref': True,
    }
    app.add_config_value('recommonmark_config', options, True)
    app.add_transform(AutoStructify)
| gazeti/aleph | docs/conf.py | Python | mit | 10,610 |
# -*- coding: utf-8 -*-
"""This module contains a function for simplifying tasks in
finite-domain representation (SASTask). Usage:
simplify.filter_unreachable_propositions(sas_task)
simplifies `sas_task` in-place. If simplification detects that the
task is unsolvable, the function raises `simplify.Impossible`. If it
detects that it has an empty goal, the function raises
`simplify.TriviallySolvable`.
The simplification procedure generates DTGs for the task and then
removes facts that are unreachable from the initial state in a DTG.
Note that such unreachable facts can exist even though we perform a
relaxed reachability analysis before grounding (and DTG reachability
is weaker than relaxed reachability) because the previous relaxed
reachability does not take into account any mutex information, while
PDDL-to-SAS conversion gets rid of certain operators that cannot be
applicable given the mutex information.
Despite the name, the method touches more than the set of facts. For
example, operators that have preconditions on pruned facts are
removed, too. (See also the docstring of
filter_unreachable_propositions.)
"""
from __future__ import print_function
from collections import defaultdict
from itertools import count
import sas_tasks
DEBUG = False
# TODO:
# This is all quite hackish and would be easier if the translator were
# restructured so that more information is immediately available for
# the propositions, and if propositions had more structure. Directly
# working with int pairs is awkward.
class DomainTransitionGraph(object):
    """Unlabeled domain transition graph of one finite-domain variable.

    Attributes:
    - init (int): the variable's value in the initial state
    - size (int): the number of values in the variable's domain
    - arcs (defaultdict: int -> set(int)): adjacency sets, unlabeled

    There are no transition labels or goal values. Nodes are intended to
    be ints in {1, ..., domain_size}, although this is not enforced.
    For derived variables, `init` should be the negation-by-failure
    fallback value so that it always counts as reachable.
    """

    def __init__(self, init, size):
        """Create a DTG with no arcs."""
        self.init = init
        self.size = size
        self.arcs = defaultdict(set)

    def add_arc(self, u, v):
        """Add an arc from u to v."""
        self.arcs[u].add(v)

    def reachable(self):
        """Return the set(int) of values reachable from the initial value."""
        seen = {self.init}
        pending = [self.init]
        while pending:
            current = pending.pop()
            for successor in self.arcs.get(current, set()):
                if successor not in seen:
                    seen.add(successor)
                    pending.append(successor)
        return seen

    def dump(self):
        """Print a human-readable description of the DTG."""
        print("DTG size:", self.size)
        print("DTG init value:", self.init)
        print("DTG arcs:")
        for source, destinations in sorted(self.arcs.items()):
            for destination in sorted(destinations):
                print(" %d => %d" % (source, destination))
def build_dtgs(task):
    """Build DTGs for all variables of the SASTask `task`.
    Return a list(DomainTransitionGraph), one for each variable.
    For derived variables, we do not consider the axiom bodies, i.e.,
    we treat each axiom as if it were an operator with no
    preconditions. In the case where the only derived variables used
    are binary and all rules change the value from the default value
    to the non-default value, this results in the correct DTG.
    Otherwise, at worst it results in an overapproximation, which
    would not threaten correctness."""
    init_vals = task.init.values
    sizes = task.variables.ranges
    # One graph per variable, seeded with its initial value and domain size.
    dtgs = [DomainTransitionGraph(init, size)
            for (init, size) in zip(init_vals, sizes)]
    def add_arc(var_no, pre_spec, post):
        """Add a DTG arc for var_no induced by transition pre_spec -> post.
        pre_spec may be -1, in which case arcs from every value
        other than post are added."""
        if pre_spec == -1:
            pre_values = set(range(sizes[var_no])).difference([post])
        else:
            pre_values = [pre_spec]
        for pre in pre_values:
            dtgs[var_no].add_arc(pre, post)
    def get_effective_pre(var_no, conditions, effect_conditions):
        """Return combined information on the conditions on `var_no`
        from operator conditions and effect conditions.
        - conditions: dict(int -> int) containing the combined
          operator prevail and preconditions
        - effect_conditions: list(pair(int, int)) containing the
          effect conditions
        Result:
        - -1 if there is no condition on var_no
        - val if there is a unique condition var_no=val
        - None if there are contradictory conditions on var_no"""
        result = conditions.get(var_no, -1)
        for cond_var_no, cond_val in effect_conditions:
            if cond_var_no == var_no:
                if result == -1:
                    # This is the first condition on var_no.
                    result = cond_val
                elif cond_val != result:
                    # We have contradictory conditions on var_no.
                    return None
        return result
    # Operators contribute arcs only where their effect can actually fire.
    for op in task.operators:
        conditions = dict(op.get_applicability_conditions())
        for var_no, _, post, cond in op.pre_post:
            effective_pre = get_effective_pre(var_no, conditions, cond)
            if effective_pre is not None:
                add_arc(var_no, effective_pre, post)
    # Axiom bodies are ignored (see docstring): treat them as precondition-free.
    for axiom in task.axioms:
        var_no, val = axiom.effect
        add_arc(var_no, -1, val)
    return dtgs
# Sentinels marking values that simplification proved constant: a fact that
# can never hold (always_false) or always holds (always_true). Compared by
# identity (`is`) throughout this module.
always_false = object()
always_true = object()
class Impossible(Exception):
    """Raised when simplification detects that the task is unsolvable."""
    pass
class TriviallySolvable(Exception):
    """Raised when the task's goal becomes empty (trivially solvable)."""
    pass
class DoesNothing(Exception):
    """Raised when an axiom turns out to have no effect and can be dropped."""
    pass
class VarValueRenaming(object):
    def __init__(self):
        # Renaming tables, filled in by register_variable():
        self.new_var_nos = []  # indexed by old var_no; None if the variable was removed
        self.new_values = []  # indexed by old var_no and old value; entries may be the always_true/always_false sentinels
        self.new_sizes = []  # indexed by new var_no; domain size after pruning
        self.new_var_count = 0  # number of surviving variables
        self.num_removed_values = 0  # statistics: total values pruned
def dump(self):
old_var_count = len(self.new_var_nos)
print("variable count: %d => %d" % (
old_var_count, self.new_var_count))
print("number of removed values: %d" % self.num_removed_values)
print("variable conversions:")
for old_var_no, (new_var_no, new_values) in enumerate(
zip(self.new_var_nos, self.new_values)):
old_size = len(new_values)
if new_var_no is None:
print("variable %d [size %d] => removed" % (
old_var_no, old_size))
else:
new_size = self.new_sizes[new_var_no]
print("variable %d [size %d] => %d [size %d]" % (
old_var_no, old_size, new_var_no, new_size))
for old_value, new_value in enumerate(new_values):
if new_value is always_false:
new_value = "always false"
elif new_value is always_true:
new_value = "always true"
print(" value %d => %s" % (old_value, new_value))
def register_variable(self, old_domain_size, init_value, new_domain):
assert 1 <= len(new_domain) <= old_domain_size
assert init_value in new_domain
if len(new_domain) == 1:
# Remove this variable completely.
new_values_for_var = [always_false] * old_domain_size
new_values_for_var[init_value] = always_true
self.new_var_nos.append(None)
self.new_values.append(new_values_for_var)
self.num_removed_values += old_domain_size
else:
new_value_counter = count()
new_values_for_var = []
for value in range(old_domain_size):
if value in new_domain:
new_values_for_var.append(next(new_value_counter))
else:
self.num_removed_values += 1
new_values_for_var.append(always_false)
new_size = next(new_value_counter)
assert new_size == len(new_domain)
self.new_var_nos.append(self.new_var_count)
self.new_values.append(new_values_for_var)
self.new_sizes.append(new_size)
self.new_var_count += 1
    def apply_to_task(self, task):
        """Apply the renaming to every component of `task`, in place.

        May raise Impossible (propagated from the goal conversion) or
        TriviallySolvable (when all goals are pruned).
        """
        if DEBUG:
            self.dump()
        self.apply_to_variables(task.variables)
        self.apply_to_mutexes(task.mutexes)
        self.apply_to_init(task.init)
        self.apply_to_goals(task.goal.pairs)
        self.apply_to_operators(task.operators)
        self.apply_to_axioms(task.axioms)
def apply_to_variables(self, variables):
variables.ranges = self.new_sizes
new_axiom_layers = [None] * self.new_var_count
for old_no, new_no in enumerate(self.new_var_nos):
if new_no is not None:
new_axiom_layers[new_no] = variables.axiom_layers[old_no]
assert None not in new_axiom_layers
variables.axiom_layers = new_axiom_layers
self.apply_to_value_names(variables.value_names)
def apply_to_value_names(self, value_names):
new_value_names = [[None] * size for size in self.new_sizes]
for var_no, values in enumerate(value_names):
for value, value_name in enumerate(values):
new_var_no, new_value = self.translate_pair((var_no, value))
if new_value is always_true:
if DEBUG:
print("Removed true proposition: %s" % value_name)
elif new_value is always_false:
if DEBUG:
print("Removed false proposition: %s" % value_name)
else:
new_value_names[new_var_no][new_value] = value_name
assert all((None not in value_names) for value_names in new_value_names)
value_names[:] = new_value_names
def apply_to_mutexes(self, mutexes):
new_mutexes = []
for mutex in mutexes:
new_facts = []
for var, val in mutex.facts:
new_var_no, new_value = self.translate_pair((var, val))
if (new_value is not always_true and
new_value is not always_false):
new_facts.append((new_var_no, new_value))
if len(new_facts) >= 2:
mutex.facts = new_facts
new_mutexes.append(mutex)
mutexes[:] = new_mutexes
def apply_to_init(self, init):
init_pairs = list(enumerate(init.values))
try:
self.convert_pairs(init_pairs)
except Impossible:
assert False, "Initial state impossible? Inconceivable!"
new_values = [None] * self.new_var_count
for new_var_no, new_value in init_pairs:
new_values[new_var_no] = new_value
assert None not in new_values
init.values = new_values
    def apply_to_goals(self, goals):
        """Translate the goal pairs in place.

        Raises Impossible when a goal fact was pruned as unreachable, and
        TriviallySolvable when every goal fact turned out to always hold.
        """
        # This may propagate Impossible up.
        self.convert_pairs(goals)
        if not goals:
            # We raise an exception because we do not consider a SAS+
            # task without goals well-formed. Our callers are supposed
            # to catch this and replace the task with a well-formed
            # trivially solvable task.
            raise TriviallySolvable
def apply_to_operators(self, operators):
new_operators = []
num_removed = 0
for op in operators:
new_op = self.translate_operator(op)
if new_op is None:
num_removed += 1
if DEBUG:
print("Removed operator: %s" % op.name)
else:
new_operators.append(new_op)
print("%d operators removed" % num_removed)
operators[:] = new_operators
def apply_to_axioms(self, axioms):
new_axioms = []
num_removed = 0
for axiom in axioms:
try:
self.apply_to_axiom(axiom)
except (Impossible, DoesNothing):
num_removed += 1
if DEBUG:
print("Removed axiom:")
axiom.dump()
else:
new_axioms.append(axiom)
print("%d axioms removed" % num_removed)
axioms[:] = new_axioms
    def translate_operator(self, op):
        """Compute a new operator from op where the var/value renaming has
        been applied. Return None if op should be pruned (because it
        is always inapplicable or has no effect.)"""
        # We do not call this apply_to_operator, breaking the analogy
        # with the other methods, because it creates a new operator
        # rather than transforming in-place. The reason for this is
        # that it would be quite difficult to generate the operator
        # in-place.
        # This method is trickier than it may at first appear. For
        # example, pre_post values should be fully sorted (see
        # documentation in the sas_tasks module), and pruning effect
        # conditions from a conditional effects can break this sort
        # order. Recreating the operator from scratch solves this
        # because the pre_post entries are sorted by
        # SASOperator.__init__.
        # Also, when we detect a pre_post pair where the effect part
        # can never trigger, the precondition part is still important,
        # but may be demoted to a prevail condition. Whether or not
        # this happens depends on the presence of other pre_post
        # entries for the same variable. We solve this by computing
        # the sorting into prevail vs. preconditions from scratch, too.
        applicability_conditions = op.get_applicability_conditions()
        try:
            self.convert_pairs(applicability_conditions)
        except Impossible:
            # The operator is never applicable.
            return None
        conditions_dict = dict(applicability_conditions)
        # Start by assuming every condition variable is prevailed; effects
        # that survive translation remove their variable from this set.
        new_prevail_vars = set(conditions_dict)
        new_pre_post = []
        for entry in op.pre_post:
            new_entry = self.translate_pre_post(entry, conditions_dict)
            if new_entry is not None:
                new_pre_post.append(new_entry)
                # Mark the variable in the entry as not prevailed.
                new_var = new_entry[0]
                new_prevail_vars.discard(new_var)
        if not new_pre_post:
            # The operator has no effect.
            return None
        new_prevail = sorted(
            (var, value)
            for (var, value) in conditions_dict.items()
            if var in new_prevail_vars)
        return sas_tasks.SASOperator(
            name=op.name, prevail=new_prevail, pre_post=new_pre_post,
            cost=op.cost)
def apply_to_axiom(self, axiom):
# The following line may generate an Impossible exception,
# which is propagated up.
self.convert_pairs(axiom.condition)
new_var, new_value = self.translate_pair(axiom.effect)
# If the new_value is always false, then the condition must
# have been impossible.
assert not new_value is always_false
if new_value is always_true:
raise DoesNothing
axiom.effect = new_var, new_value
def translate_pre_post(self, pre_post_entry, conditions_dict):
    """Return a translated version of a pre_post entry.
    If the entry never causes a value change, return None.

    (It might seem that a possible precondition part of pre_post
    gets lost in this case, but pre_post entries that become
    prevail conditions are handled elsewhere.)

    conditions_dict contains all applicability conditions
    (prevail/pre) of the operator, already converted. This is
    used to detect effect conditions that can never fire.

    The method may assume that the operator remains reachable,
    i.e., that it does not have impossible preconditions, as these
    are already checked elsewhere.

    Possible cases:
    - effect is always_true => return None
    - effect equals prevailed value => return None
    - effect condition is impossible given operator applicability
      condition => return None
    - otherwise => return converted pre_post tuple
    """
    var_no, pre, post, cond = pre_post_entry
    new_var_no, new_post = self.translate_pair((var_no, post))
    if new_post is always_true:
        # Effect value is constantly true; the effect can never change anything.
        return None
    if pre == -1:
        # -1 encodes "no precondition on this variable" in the SAS encoding.
        new_pre = -1
    else:
        _, new_pre = self.translate_pair((var_no, pre))
    assert new_pre is not always_false, (
        "This function should only be called for operators "
        "whose applicability conditions are deemed possible.")
    if new_post == new_pre:
        # The effect would not change the variable's value.
        return None
    new_cond = list(cond)
    try:
        # Convert the effect conditions in place (on our private copy).
        self.convert_pairs(new_cond)
    except Impossible:
        # The effect conditions can never be satisfied.
        return None
    for cond_var, cond_value in new_cond:
        if (cond_var in conditions_dict and
                conditions_dict[cond_var] != cond_value):
            # This effect condition is not compatible with
            # the applicability conditions.
            return None
    assert new_post is not always_false, (
        "if we survived so far, this effect can trigger "
        "(as far as our analysis can determine this), "
        "and then new_post cannot be always_false")
    assert new_pre is not always_true, (
        "if this pre_post changes the value and can fire, "
        "new_pre cannot be always_true")
    return new_var_no, new_pre, new_post, new_cond
def translate_pair(self, fact_pair):
    """Map an old-numbering (variable, value) fact pair to the new numbering."""
    old_var, old_value = fact_pair
    return self.new_var_nos[old_var], self.new_values[old_var][old_value]
def convert_pairs(self, pairs):
    """Translate a list of fact pairs in place (hence "convert", not "translate").

    Pairs whose translated value is constantly true are dropped; a pair
    whose translated value is constantly false makes the whole condition
    unsatisfiable, signalled by raising Impossible.
    """
    translated = []
    for old_pair in pairs:
        new_var_no, new_value = self.translate_pair(old_pair)
        if new_value is always_false:
            raise Impossible
        if new_value is always_true:
            continue  # trivially satisfied; no need to keep it
        assert new_var_no is not None
        translated.append((new_var_no, new_value))
    pairs[:] = translated
def build_renaming(dtgs):
    """Build a VarValueRenaming from the domain transition graphs.

    For each variable's DTG, register its domain size, initial value and
    the set of values reachable from the initial value.
    """
    renaming = VarValueRenaming()
    for dtg in dtgs:
        renaming.register_variable(dtg.size, dtg.init, dtg.reachable())
    return renaming
def filter_unreachable_propositions(sas_task):
    """We remove unreachable propositions and then prune variables
    with only one value.

    Examples of things that are pruned:
    - Constant propositions that are not detected in instantiate.py
      because instantiate.py only reasons at the predicate level, and some
      predicates such as "at" in Depot are constant for some objects
      (hoists), but not others (trucks).
      Example: "at(hoist1, distributor0)" and the associated variable
      in depots-01.
    - "none of those" values that are unreachable.
      Example: at(truck1, ?x) = <none of those> in depots-01.
    - Certain values that are relaxed reachable but detected as
      unreachable after SAS instantiation because the only operators
      that set them have inconsistent preconditions.
      Example: on(crate0, crate0) in depots-01.

    Mutates *sas_task* in place.
    """
    if DEBUG:
        sas_task.validate()
    # Compute per-variable reachability, then derive the renaming from it.
    dtgs = build_dtgs(sas_task)
    renaming = build_renaming(dtgs)
    # apply_to_task may raise Impossible if the goal is detected as
    # unreachable or TriviallySolvable if it has no goal. We let the
    # exceptions propagate to the caller.
    renaming.apply_to_task(sas_task)
    print("%d propositions removed" % renaming.num_removed_values)
    if DEBUG:
        sas_task.validate()
| miquelramirez/LAPKT-public | external/fd/simplify.py | Python | gpl-3.0 | 20,227 |
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import errno
import os
import tempfile
from essential import excutils
from essential import log as logging
LOG = logging.getLogger(__name__)
_FILE_CACHE = {}
def ensure_tree(path):
    """Create a directory tree, tolerating an already-existing directory.

    :param path: Directory to create
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only swallow "already exists" when the existing entry really is
        # a directory; anything else is a genuine error.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    :param filename: Path of the file to read.
    :param force_reload: Whether to reload the file.
    :returns: A tuple ``(reloaded, data)`` where *reloaded* is a boolean
        specifying whether the data was freshly re-read from disk and
        *data* is the (possibly cached) file contents.
    """
    global _FILE_CACHE

    if force_reload and filename in _FILE_CACHE:
        del _FILE_CACHE[filename]

    reloaded = False
    mtime = os.path.getmtime(filename)
    cache_info = _FILE_CACHE.setdefault(filename, {})

    # Reload when we have no cached copy or the file is newer on disk.
    if not cache_info or mtime > cache_info.get('mtime', 0):
        # Lazy %-style arguments: the message is only formatted when the
        # debug level is actually enabled (was eager "%" formatting).
        LOG.debug("Reloading cached file %s", filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        reloaded = True
    return (reloaded, cache_info['data'])
def delete_if_exists(path, remove=os.unlink):
    """Delete a file, ignoring "file not found".

    :param path: File to delete
    :param remove: Optional function to remove passed path
    """
    try:
        remove(path)
    except OSError as e:
        if e.errno == errno.ENOENT:
            return  # already gone -- nothing to do
        raise
@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
    """Protect code that wants to operate on PATH atomically.
    Any exception will cause PATH to be removed.

    :param path: File to work with
    :param remove: Optional function to remove passed path
    """
    try:
        yield
    except Exception:
        # save_and_reraise_exception re-raises the original exception after
        # the cleanup runs, so callers still see the failure.
        with excutils.save_and_reraise_exception():
            remove(path)
def file_open(*args, **kwargs):
    """Open file

    see built-in open() documentation for more details

    Note: The reason this is kept in a separate module is to easily
    be able to provide a stub module that doesn't alter system
    state at all (for unit tests)
    """
    # The ``file`` builtin is Python 2 only; ``open`` is the recommended
    # spelling on Python 2 and the only one available on Python 3.
    return open(*args, **kwargs)
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Create a temporary file and write *content* into it.

    If *path* is given it is used as the containing directory and is
    created first when it does not yet exist.

    :param content: bytes to write into the temporary file.
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp
    :returns: the path of the newly created file.

    For example: it can be used in database tests for creating
    configuration files.
    """
    if path:
        ensure_tree(path)

    fd, abspath = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        os.write(fd, content)
    finally:
        # Always release the descriptor, even if the write fails.
        os.close(fd)
    return abspath
| gaolichuang/py-essential | essential/fileutils.py | Python | apache-2.0 | 3,825 |
import warnings
import itertools
from contextlib import contextmanager
from distutils.version import LooseVersion
import numpy as np
import matplotlib as mpl
from matplotlib import transforms
from .. import utils
from .. import _py3k_compat as py3k
class Renderer(object):
    """Abstract base class for figure renderers.

    An exporter walks a matplotlib figure and invokes the open_*/close_*
    context hooks and draw_* primitives below; concrete renderers override
    them to emit their target format.
    """

    @staticmethod
    def ax_zoomable(ax):
        # Zoomable = the axes exists and has interactive navigation enabled.
        return bool(ax and ax.get_navigate())

    @staticmethod
    def ax_has_xgrid(ax):
        # BUGFIX: previously this inspected ax.yaxis.get_gridlines(),
        # copy-pasted from ax_has_ygrid; the x grid must be read from
        # the x axis.
        return bool(ax and ax.xaxis._gridOnMajor and ax.xaxis.get_gridlines())

    @staticmethod
    def ax_has_ygrid(ax):
        return bool(ax and ax.yaxis._gridOnMajor and ax.yaxis.get_gridlines())

    @property
    def current_ax_zoomable(self):
        return self.ax_zoomable(self._current_ax)

    @property
    def current_ax_has_xgrid(self):
        return self.ax_has_xgrid(self._current_ax)

    @property
    def current_ax_has_ygrid(self):
        return self.ax_has_ygrid(self._current_ax)

    @contextmanager
    def draw_figure(self, fig, props):
        if hasattr(self, "_current_fig") and self._current_fig is not None:
            warnings.warn("figure embedded in figure: something is wrong")
        self._current_fig = fig
        self._fig_props = props
        self.open_figure(fig=fig, props=props)
        # NOTE(review): the cleanup below is skipped if the body raises;
        # wrapping in try/finally would change behavior for subclasses that
        # rely on state inspection after an error, so it is left as-is.
        yield
        self.close_figure(fig=fig)
        self._current_fig = None
        self._fig_props = {}

    @contextmanager
    def draw_axes(self, ax, props):
        if hasattr(self, "_current_ax") and self._current_ax is not None:
            warnings.warn("axes embedded in axes: something is wrong")
        self._current_ax = ax
        self._ax_props = props
        self.open_axes(ax=ax, props=props)
        yield
        self.close_axes(ax=ax)
        self._current_ax = None
        self._ax_props = {}

    @contextmanager
    def draw_legend(self, legend, props):
        self._current_legend = legend
        self._legend_props = props
        self.open_legend(legend=legend, props=props)
        yield
        self.close_legend(legend=legend)
        self._current_legend = None
        self._legend_props = {}

    # Following are the functions which should be overloaded in subclasses

    def open_figure(self, fig, props):
        """
        Begin commands for a particular figure.

        Parameters
        ----------
        fig : matplotlib.Figure
            The Figure which will contain the ensuing axes and elements
        props : dictionary
            The dictionary of figure properties
        """
        pass

    def close_figure(self, fig):
        """
        Finish commands for a particular figure.

        Parameters
        ----------
        fig : matplotlib.Figure
            The figure which is finished being drawn.
        """
        pass

    def open_axes(self, ax, props):
        """
        Begin commands for a particular axes.

        Parameters
        ----------
        ax : matplotlib.Axes
            The Axes which will contain the ensuing axes and elements
        props : dictionary
            The dictionary of axes properties
        """
        pass

    def close_axes(self, ax):
        """
        Finish commands for a particular axes.

        Parameters
        ----------
        ax : matplotlib.Axes
            The Axes which is finished being drawn.
        """
        pass

    def open_legend(self, legend, props):
        """
        Begin commands for a particular legend.

        Parameters
        ----------
        legend : matplotlib.legend.Legend
            The Legend that will contain the ensuing elements
        props : dictionary
            The dictionary of legend properties
        """
        pass

    def close_legend(self, legend):
        """
        Finish commands for a particular legend.

        Parameters
        ----------
        legend : matplotlib.legend.Legend
            The Legend which is finished being drawn
        """
        pass

    def draw_marked_line(self, data, coordinates, linestyle, markerstyle,
                         label, mplobj=None):
        """Draw a line that also has markers.

        If this isn't reimplemented by a renderer object, by default, it will
        make a call to BOTH draw_line and draw_markers when both markerstyle
        and linestyle are not None in the same Line2D object.
        """
        if linestyle is not None:
            self.draw_line(data, coordinates, linestyle, label, mplobj)
        if markerstyle is not None:
            self.draw_markers(data, coordinates, markerstyle, label, mplobj)

    def draw_line(self, data, coordinates, style, label, mplobj=None):
        """
        Draw a line. By default, draw the line via the draw_path() command.
        Some renderers might wish to override this and provide more
        fine-grained behavior.

        In matplotlib, lines are generally created via the plt.plot() command,
        though this command also can create marker collections.

        Parameters
        ----------
        data : array_like
            A shape (N, 2) array of datapoints.
        coordinates : string
            A string code, which should be either 'data' for data coordinates,
            or 'figure' for figure (pixel) coordinates.
        style : dictionary
            a dictionary specifying the appearance of the line.
        mplobj : matplotlib object
            the matplotlib plot element which generated this line
        """
        pathcodes = ['M'] + (data.shape[0] - 1) * ['L']
        pathstyle = dict(facecolor='none', **style)
        # Translate line-style keys to the path-style vocabulary.
        pathstyle['edgecolor'] = pathstyle.pop('color')
        pathstyle['edgewidth'] = pathstyle.pop('linewidth')
        self.draw_path(data=data, coordinates=coordinates,
                       pathcodes=pathcodes, style=pathstyle, mplobj=mplobj)

    @staticmethod
    def _iter_path_collection(paths, path_transforms, offsets, styles):
        """Build an iterator over the elements of the path collection"""
        N = max(len(paths), len(offsets))
        # Before mpl 1.4.0, path_transform can be a false-y value, not a valid
        # transformation matrix.
        if LooseVersion(mpl.__version__) < LooseVersion('1.4.0'):
            if path_transforms is None:
                path_transforms = [np.eye(3)]
        edgecolor = styles['edgecolor']
        if np.size(edgecolor) == 0:
            edgecolor = ['none']
        facecolor = styles['facecolor']
        if np.size(facecolor) == 0:
            facecolor = ['none']
        elements = [paths, path_transforms, offsets,
                    edgecolor, styles['linewidth'], facecolor]
        # Cycle every element list so shorter lists repeat, then truncate at N.
        it = itertools
        return it.islice(py3k.zip(*py3k.map(it.cycle, elements)), N)

    def draw_path_collection(self, paths, path_coordinates, path_transforms,
                             offsets, offset_coordinates, offset_order,
                             styles, mplobj=None):
        """
        Draw a collection of paths. The paths, offsets, and styles are all
        iterables, and the number of paths is max(len(paths), len(offsets)).

        By default, this is implemented via multiple calls to the draw_path()
        function. For efficiency, Renderers may choose to customize this
        implementation.

        Examples of path collections created by matplotlib are scatter plots,
        histograms, contour plots, and many others.

        Parameters
        ----------
        paths : list
            list of tuples, where each tuple has two elements:
            (data, pathcodes). See draw_path() for a description of these.
        path_coordinates: string
            the coordinates code for the paths, which should be either
            'data' for data coordinates, or 'figure' for figure (pixel)
            coordinates.
        path_transforms: array_like
            an array of shape (*, 3, 3), giving a series of 2D Affine
            transforms for the paths. These encode translations, rotations,
            and scalings in the standard way.
        offsets: array_like
            An array of offsets of shape (N, 2)
        offset_coordinates : string
            the coordinates code for the offsets, which should be either
            'data' for data coordinates, or 'figure' for figure (pixel)
            coordinates.
        offset_order : string
            either "before" or "after". This specifies whether the offset
            is applied before the path transform, or after. The matplotlib
            backend equivalent is "before"->"data", "after"->"screen".
        styles: dictionary
            A dictionary in which each value is a list of length N, containing
            the style(s) for the paths.
        mplobj : matplotlib object
            the matplotlib plot element which generated this collection
        """
        if offset_order == "before":
            raise NotImplementedError("offset before transform")

        for tup in self._iter_path_collection(paths, path_transforms,
                                              offsets, styles):
            (path, path_transform, offset, ec, lw, fc) = tup
            vertices, pathcodes = path
            path_transform = transforms.Affine2D(path_transform)
            vertices = path_transform.transform(vertices)
            # This is a hack:
            if path_coordinates == "figure":
                path_coordinates = "points"
            style = {"edgecolor": utils.export_color(ec),
                     "facecolor": utils.export_color(fc),
                     "edgewidth": lw,
                     "dasharray": "10,0",
                     "alpha": styles['alpha'],
                     "zorder": styles['zorder']}
            self.draw_path(data=vertices, coordinates=path_coordinates,
                           pathcodes=pathcodes, style=style, offset=offset,
                           offset_coordinates=offset_coordinates,
                           mplobj=mplobj)

    def draw_markers(self, data, coordinates, style, label, mplobj=None):
        """
        Draw a set of markers. By default, this is done by repeatedly
        calling draw_path(), but renderers should generally overload
        this method to provide a more efficient implementation.

        In matplotlib, markers are created using the plt.plot() command.

        Parameters
        ----------
        data : array_like
            A shape (N, 2) array of datapoints.
        coordinates : string
            A string code, which should be either 'data' for data coordinates,
            or 'figure' for figure (pixel) coordinates.
        style : dictionary
            a dictionary specifying the appearance of the markers.
        mplobj : matplotlib object
            the matplotlib plot element which generated this marker collection
        """
        vertices, pathcodes = style['markerpath']
        pathstyle = dict((key, style[key]) for key in ['alpha', 'edgecolor',
                                                       'facecolor', 'zorder',
                                                       'edgewidth'])
        pathstyle['dasharray'] = "10,0"
        # One path per data point, offset to the point's location.
        for vertex in data:
            self.draw_path(data=vertices, coordinates="points",
                           pathcodes=pathcodes, style=pathstyle,
                           offset=vertex, offset_coordinates=coordinates,
                           mplobj=mplobj)

    def draw_text(self, text, position, coordinates, style,
                  text_type=None, mplobj=None):
        """
        Draw text on the image.

        Parameters
        ----------
        text : string
            The text to draw
        position : tuple
            The (x, y) position of the text
        coordinates : string
            A string code, which should be either 'data' for data coordinates,
            or 'figure' for figure (pixel) coordinates.
        style : dictionary
            a dictionary specifying the appearance of the text.
        text_type : string or None
            if specified, a type of text such as "xlabel", "ylabel", "title"
        mplobj : matplotlib object
            the matplotlib plot element which generated this text
        """
        raise NotImplementedError()

    def draw_path(self, data, coordinates, pathcodes, style,
                  offset=None, offset_coordinates="data", mplobj=None):
        """
        Draw a path.

        In matplotlib, paths are created by filled regions, histograms,
        contour plots, patches, etc.

        Parameters
        ----------
        data : array_like
            A shape (N, 2) array of datapoints.
        coordinates : string
            A string code, which should be either 'data' for data coordinates,
            'figure' for figure (pixel) coordinates, or "points" for raw
            point coordinates (useful in conjunction with offsets, below).
        pathcodes : list
            A list of single-character SVG pathcodes associated with the data.
            Path codes are one of ['M', 'm', 'L', 'l', 'Q', 'q', 'T', 't',
            'S', 's', 'C', 'c', 'Z', 'z']
            See the SVG specification for details. Note that some path codes
            consume more than one datapoint (while 'Z' consumes none), so
            in general, the length of the pathcodes list will not be the same
            as that of the data array.
        style : dictionary
            a dictionary specifying the appearance of the line.
        offset : list (optional)
            the (x, y) offset of the path. If not given, no offset will
            be used.
        offset_coordinates : string (optional)
            A string code, which should be either 'data' for data coordinates,
            or 'figure' for figure (pixel) coordinates.
        mplobj : matplotlib object
            the matplotlib plot element which generated this path
        """
        raise NotImplementedError()

    def draw_image(self, imdata, extent, coordinates, style, mplobj=None):
        """
        Draw an image.

        Parameters
        ----------
        imdata : string
            base64 encoded png representation of the image
        extent : list
            the axes extent of the image: [xmin, xmax, ymin, ymax]
        coordinates: string
            A string code, which should be either 'data' for data coordinates,
            or 'figure' for figure (pixel) coordinates.
        style : dictionary
            a dictionary specifying the appearance of the image
        mplobj : matplotlib object
            the matplotlib plot object which generated this image
        """
        raise NotImplementedError()
| plotly/python-api | packages/python/plotly/plotly/matplotlylib/mplexporter/renderers/base.py | Python | mit | 14,613 |
# -*- coding: utf-8 -*-
{
'name': "cnpe_meeting",
'summary': """
CNPE meeting schdule and meeting room booking tools.
For tsc only""",
'description': """
CNPE meeting schdule and meeting room booking tools.
- create meeting room.
- meeting room booking.
- meeting scheduling.
- meeting room managing.
""",
'author': "Mack Fire",
'website': "http://tsc.cnpe.cc",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'hr',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base','auth_ldap'],
# always loaded
'data': [
'security/ir.model.access.csv',
'views/cnpe_meeting_templates.xml',
'views/cnpe_meeting_views.xml',
'data/cnpe_meeting_data.xml',
],
# only loaded in demonstration mode
'demo': [
'data/cnpe_meeting_demo.xml',
],
}
| MackZxh/cnpe_meeting | __openerp__.py | Python | agpl-3.0 | 1,077 |
def add_newline_segment(powerline):
    """Append a line-break segment so the prompt continues on a new line.

    Color.RESET is imported elsewhere in powerline-shell; RESET foreground
    and background keep the newline itself unstyled.
    """
    powerline.append("\n", Color.RESET, Color.RESET, separator='')
| tswsl1989/powerline-shell | segments/newline.py | Python | mit | 103 |
# Scratch script exercising the Facebook Graph API via the `facebook` SDK.
# Most experiments are kept as commented-out / triple-quoted snippets.
import facebook
import math
import requests
from pprint import pprint
# MyData supplies credentials such as access_token (keeps them out of
# version control) -- star import, so names come in unqualified.
from MyData import *

# Used as an effectively-unbounded `limit` for Graph API queries.
VeryBigNum = math.pow(10,10)

# NOTE: constructing the client does not hit the network by itself.
graph = facebook.GraphAPI(access_token=access_token,version='2.6')
#pprint(graph.get_connections(id='me',connection_name='feed'))

# For all photos uploaded by user
#pprint(graph.get_connections(id='me',connection_name='photos',type='uploaded',limit=VeryBigNum))

# The triple-quoted blocks below are disabled example snippets, not docs.
'''
## Downloading all photos in FBAlbum by providing album ID
## Getting all photo ids for particular album
data = graph.get_connections(album_id,connection_name='photos')['data']
edgeList = []
for i in data:
edgeList.append(i['id'])
pprint(edgeList)
for i in range(0,len(edgeList)):
f = open('FacebookImg/'+str(i)+'.jpg','wb')
f.write(requests.get('https://www.facebook.com/photo/download/?fbid='+str(edgeList[i])).content)
f.close()
'''

## returns Like info about photo
#pprint(graph.get_connections(photo_id,connection_name='likes',limit=VeryBigNum))

#pprint(graph.get_connections(id='me',connection_name='friends',limit=VeryBigNum))

'''
f = open('1.jpg','wb')
f.write(requests.get('https://www.facebook.com/photo/download/?fbid=766830926709653').content)
f.close()
# Get details about any authenticated post
post = graph.get_object(fb_user_id+photo_id)
print(post)
'''

'''
# returning friend count and some other wierd stuff
friends = graph.get_connections(id='me',connection_name='')
print(friends)
'''

'''
## Below Also returns post_id and Comment_id after posting and commenting :P
# Post message on FEED
num = graph.put_object(parent_object='me',connection_name='feed',message='Hello World! through Graph')
# Comment and Reply on Post pointed by Parent_object to which post_id is assigned
cmnt1 =graph.put_object(parent_object=num['id'],connection_name='comments',message='Great')
cmnt2 = graph.put_object(parent_object=cmnt1['id'],connection_name='comments',message='Great')
print(num)
print(cmnt1)
'''

'''
attachment = {
'name': 'Random Name',
'link': 'https://www.yoman.com',
'caption': 'Yo Man!',
'description': 'Its Hot Today',
'picture': 'http://i.imgur.com/kUCMPWX.jpg'
}
print(graph.put_wall_post(message='boom',attachment=attachment))
'''
#####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
from autobahn import util
from autobahn.wamp import types
from txaio import make_logger
from crossbar.router.auth.pending import PendingAuth
__all__ = ('PendingAuthAnonymous',)
class PendingAuthAnonymous(PendingAuth):
    """
    Pending authentication information for WAMP-Anonymous authentication.
    """

    log = make_logger()

    # Authentication method name announced to WAMP peers.
    AUTHMETHOD = u'anonymous'

    def hello(self, realm, details):
        """Process a WAMP HELLO for anonymous authentication.

        Returns a types.Accept / types.Deny, or a Deferred resolving to
        one when a dynamic authenticator is configured.
        """
        # remember the realm the client requested to join (if any)
        self._realm = realm

        # remember the authid the client wants to identify as (if any);
        # anonymous clients without one get a random serial number
        self._authid = details.authid or util.generate_serial_number()

        self._session_details[u'authmethod'] = u'anonymous'
        self._session_details[u'authextra'] = details.authextra

        # WAMP-anonymous "static"
        if self._config[u'type'] == u'static':

            self._authprovider = u'static'

            # FIXME: if cookie tracking is enabled, set authid to cookie value
            # self._authid = self._transport._cbtid

            principal = {
                u'authid': self._authid,
                # explicit authrole wins over the configured role; default 'anonymous'
                u'role': details.authrole or self._config.get(u'role', u'anonymous'),
                u'extra': details.authextra
            }

            error = self._assign_principal(principal)
            if error:
                return error

            return self._accept()

        # WAMP-Ticket "dynamic"
        elif self._config[u'type'] == u'dynamic':

            self._authprovider = u'dynamic'

            error = self._init_dynamic_authenticator()
            if error:
                return error

            # Ask the configured authenticator procedure for a principal.
            d = self._authenticator_session.call(self._authenticator, self._realm, self._authid, self._session_details)

            def on_authenticate_ok(principal):
                error = self._assign_principal(principal)
                if error:
                    return error
                return self._accept()

            def on_authenticate_error(err):
                return self._marshal_dynamic_authenticator_error(err)

            d.addCallbacks(on_authenticate_ok, on_authenticate_error)

            return d

        else:
            # should not arrive here, as config errors should be caught earlier
            return types.Deny(message=u'invalid authentication configuration (authentication type "{}" is unknown)'.format(self._config['type']))
# Make directory a Python package.
| Donkyhotay/MoonPy | zope/app/testing/__init__.py | Python | gpl-3.0 | 35 |
# Packaging script for the pyslight package.
# NOTE(review): distutils is deprecated (removed from the stdlib in
# Python 3.12); consider migrating to setuptools.
from distutils.core import setup

setup(name='pyslight',
      packages=['pyslight'],
      author='Daniel Ruschel Dutra',
      license='GPLv3',
      description='Set of functions to work with Starlight')
| danielrd6/pyslight | setup.py | Python | gpl-3.0 | 207 |
#!/usr/bin/env python
import glob
import numpy as np
import mygis
from bunch import Bunch
# icar[:,4:,1:])*10-wrf[:,:-6,:-4]
def load_wrf(filename, preciponly=False):
    """Load WRF output fields from *filename* into a Bunch.

    Accumulated precipitation is de-bucketed when the I_RAINNC bucket
    variable is present (each bucket tip = 100 mm).  All fields are
    trimmed by [:-6] in y and [:-4] in x -- presumably to drop boundary
    cells so the grid lines up with the ICAR domain loaded by load_icar
    (which trims [4:], [1:]); TODO confirm the offsets.
    """
    precip=mygis.read_nc(filename,"RAINNC").data
    try:
        precip+=mygis.read_nc(filename,"I_RAINNC").data*100 # try to add bucket data
    except KeyError:
        pass
    if preciponly:
        return Bunch(precip=precip,hgt=None)

    # "T" in WRF output is perturbation potential temperature; +300 K
    # restores the full theta (standard WRF convention).
    t=mygis.read_nc(filename,"T").data+300
    qv=mygis.read_nc(filename,"QVAPOR").data
    p=mygis.read_nc(filename,"P").data
    p+=mygis.read_nc(filename,"PB").data        # perturbation + base-state pressure
    u=mygis.read_nc(filename,"U").data
    v=mygis.read_nc(filename,"V").data
    w=mygis.read_nc(filename,"W").data
    z=mygis.read_nc(filename,"PH").data
    z+=mygis.read_nc(filename,"PHB").data       # geopotential = PH + PHB
    z/=9.8                                      # geopotential -> height [m]

    hgt=mygis.read_nc(filename,"HGT").data
    hgt=hgt[0,:-6,:-4]

    # Trim all fields to the common comparison sub-domain.
    p=p[:,:,:-6,:-4]
    t=t[:,:,:-6,:-4]
    qv=qv[:,:,:-6,:-4]
    u=u[:,:,:-6,:-4]
    v=v[:,:,:-6,:-4]
    w=w[:,:,:-6,:-4]
    precip=precip[:,:-6,:-4]

    return Bunch(w=w,z=z,hgt=hgt,u=u,v=v,precip=precip,p=p,qv=qv,t=t)
def load_icar(filename,preciponly=False):
    """Load ICAR output fields from *filename* into a Bunch.

    The native vertical velocity is converted to a physical w by scaling
    with the layer thickness over dx and adding the terrain-following
    contributions u*dz/dx and v*dz/dy.  Fields are trimmed by [4:] in y
    and [1:] in x -- presumably to align with the trimmed WRF domain in
    load_wrf; TODO confirm the offsets.
    """
    precip=mygis.read_nc(filename,"rain").data
    if preciponly:
        return Bunch(precip=precip)

    u=mygis.read_nc(filename,"u").data
    v=mygis.read_nc(filename,"v").data
    w=mygis.read_nc(filename,"w").data
    z=mygis.read_nc(filename,"z").data

    dz=np.zeros(z.shape)
    dx=4000.0   # horizontal grid spacing [m], hard-coded for this setup
    # Reconstruct layer thicknesses from level heights:
    # dz[0] = 2*z[0]; dz[i] = 2*(z[i]-z[i-1]) - dz[i-1]
    dz[:,0,:]=z[:,0,:]*2
    for i in range(1,z.shape[1]):
        dz[:,i,:] = 2*(z[:,i,:]-z[:,i-1,:]) - dz[:,i-1,:]
        # NOTE(review): level 0 of w is never rescaled (the loop starts
        # at 1) -- confirm this is intentional.
        w[:,:,i,:] *= dz[np.newaxis,:,i,:] / dx

    # Terrain-slope contributions to vertical motion, averaged back onto
    # cell centers after differencing.
    dzdx=np.diff(z,axis=2)
    dzdy=np.diff(z,axis=0)
    w_u=u[:,:,:,:-2]*dzdx[np.newaxis,:,:,:]/dx
    w_u=(w_u[:,:,:,1:]+w_u[:,:,:,:-1])/2.0
    w_v=v[:,:-2,:,:]*dzdy[np.newaxis,:,:,:]/dx
    w_v=(w_v[:,1:,:,:]+w_v[:,:-1,:,:])/2.0
    w[:,1:-1,:,1:-1]+=w_u[:,1:-1,:,:]+w_v[:,:,:,1:-1]

    qv=mygis.read_nc(filename,"qv").data
    t=mygis.read_nc(filename,"th").data
    p=mygis.read_nc(filename,"p").data

    # Trim to the common comparison sub-domain.
    w=w[:,4:,:,1:]
    v=v[:,4:,:,1:]
    u=u[:,4:,:,1:]
    z=z[4:,:,1:]
    precip=precip[:,4:,1:]
    t=t[:,4:,:,1:]
    qv=qv[:,4:,:,1:]
    p=p[:,4:,:,1:]

    return Bunch(z=z,w=w,u=u,v=v,qv=qv,t=t,p=p,precip=precip)
def load_multi(fnames, load_func=load_icar):
    """Load a sequence of model output files and concatenate them in time.

    :param fnames: either a list of file names or a glob pattern string.
    :param load_func: loader applied to each file (load_icar or load_wrf).
    :returns: Bunch with every time-varying field concatenated along
        axis 0; static fields ("z", "hgt") are taken from the first file.
    """
    # isinstance (not type(...) != list) so list subclasses are accepted;
    # anything else is treated as a glob pattern.
    if not isinstance(fnames, list):
        fnames = glob.glob(fnames)
    fnames.sort()

    data = []
    for f in fnames:
        print(f)
        data.append(load_func(f))

    master_data = Bunch()
    print(data[0].keys())
    for k in data[0].keys():
        print(k)
        if (k == "z") or (k == "hgt"):
            # Static fields: identical in every file, keep the first copy.
            master_data[k] = data[0][k]
        else:
            if k == "precip":
                nt, ny, nx = data[0][k].shape
                master_data[k] = np.zeros((nt * len(data), ny, nx))
            else:
                nt, ny, nz, nx = data[0][k].shape
                master_data[k] = np.zeros((nt * len(data), ny, nz, nx))
            # NOTE(review): assumes every file holds the same number of
            # time steps as the first one -- confirm for partial files.
            for i in range(len(data)):
                master_data[k][i * nt:(i + 1) * nt] = data[i][k]
    return master_data
| NCAR/icar | helpers/wrf/load_comparison.py | Python | mit | 3,192 |
from django.shortcuts import render
from django.http import HttpResponse
from .models import UserFile, Errors
import md5
# Create your views here.
def index(request):
    """Render the XML checker landing page."""
    template_name = 'xmlchecker/index.html'
    return render(request, template_name)
def check(request):
    """Persist the submitted XML and report validation errors as HTML."""
    # NOTE(review): request.POST.items()[2][1] indexes a form field by
    # position -- Python 2 era and dependent on dict ordering; presumably
    # it grabs the XML textarea. Verify against the form and select the
    # field by name instead.
    obj = UserFile.objects.create(xml_text=str(request.POST.items()[2][1].encode('utf-8')))
    obj.save()
    # str(obj) is used as an identifier -- presumably UserFile.__str__
    # returns a hash (hence the md5 import at module level); TODO confirm.
    hash_str = str(obj)
    # obj.errors appears to be an iterable of error strings computed by the
    # UserFile model; joined with <br/> for HTML display.
    err_obj = Errors.objects.create(errors='<br/>'.join(obj.errors), _id=hash_str)
    return HttpResponse('Errors:' + err_obj.errors)
| Szboy/OCN-XMLChecker | xmlchecker/views.py | Python | gpl-2.0 | 488 |
#Some functions to help analysing DUT-8
import numpy as np
import sys
from ase import Atoms, neighborlist
import os
import pickle
def buildNL(mol, path='./', radii=None, save=True):
    """Build (or load a cached) ase NeighborList for *mol*.

    :param mol: ase Atoms object.
    :param path: directory used for the pickle cache file.
    :param radii: optional mapping element symbol -> cutoff radius; the
        defaults below look like covalent radii in Angstrom -- confirm.
    :param save: when True, reuse/write ``neighborList.pickle`` in *path*;
        when False, always rebuild and never touch the cache.
    """
    #create nl
    if radii is None:
        radii = {}
        radii[ 'H'] = 0.30
        radii[ 'C'] = 0.77
        radii[ 'N'] = 0.70
        radii[ 'O'] = 0.66
        radii['Ni'] = 1.24
    nAtoms = len(mol)
    # Rebuild when there is no cache file, or when caching is disabled.
    if (not os.path.isfile(os.path.join(path, 'neighborList.pickle'))) or (not save):
        #create a list of cutoffs
        cutOff = []
        for j in range(0,nAtoms):
            cutOff.append(radii[mol[j].symbol])
        #initiate neighborlist
        neighborList = neighborlist.NeighborList(cutOff,self_interaction=False,bothways=True)
        neighborList.update(mol)
        if save:
            with open(os.path.join(path, 'neighborList.pickle'),'wb') as f:
                pickle.dump(neighborList,f)
    elif save:
        # Cache hit: load the previously pickled neighbor list.
        # NOTE(review): pickle.load on files you did not create is unsafe.
        with open(os.path.join(path, 'neighborList.pickle'),'rb') as f:
            neighborList = pickle.load(f)
    print("Bond Map created")
    return neighborList
def getBenzNiAngleList(ana, imI):
    """Make a list of all occurences.

    For each requested image index, extend every Ni-Ni-O-C dihedral by one
    bond: follow the terminal C to its next carbon and pick that carbon's
    two attached carbons, ordered so the one farther from the first Ni
    comes third.  Returns one list of 4-tuples per image.
    """
    if isinstance(imI, int):
        # Accept a single image index as well as a list of them.
        imI = [imI]
    dihedralList = []
    for i in imI:
        dihedralList.append([])
        nAtoms = len(ana.images[i])
        molecule = ana.images[i]
        bondList = ana.all_bonds[i]
        relDihedrals = ana.get_dihedrals('Ni', 'Ni', 'O', 'C')[i]
        for dihed in relDihedrals:
            # The carbon bonded to the dihedral's terminal C (exactly one).
            nextC = [ idx for idx in bondList[dihed[-1]] if molecule[idx].symbol == 'C']
            assert len(nextC) == 1
            # Its two other carbon neighbours (excluding the terminal C itself).
            attachedCs = [ idx for idx in bondList[nextC[0]] if (molecule[idx].symbol == 'C') and (idx != dihed[-1])]
            assert len(attachedCs) == 2
            # Order by distance to the first Ni (minimum-image convention).
            dists = [ molecule.get_distance(dihed[0], idx, mic=True) for idx in attachedCs ]
            if dists[0] < dists[1]:
                dihedralList[-1].append((dihed[0], dihed[1], attachedCs[1], attachedCs[0]))
            else:
                dihedralList[-1].append((dihed[0], dihed[1], attachedCs[0], attachedCs[1]))
    return dihedralList
def getAlphaList(ana, imI):
    """Make a list of all occurences.

    Collects, for each Ni atom of image *imI*, the set of terminal carbons
    of its four Ni-O-C-C dihedrals, de-duplicates those sets, and returns
    all pairwise combinations within each set, wrapped in a single-element
    list (matching the per-image list shape of the other helpers).
    """
    from itertools import combinations
    allIdx = ana._get_symbol_idxs(imI, 'Ni')
    nAtoms = len(ana.images[imI])
    molecule = ana.images[imI]
    bondList = ana.all_bonds[imI]
    dihedrals = ana.get_dihedrals('Ni', 'O', 'C', 'C')[0]
    alphaList = []
    for niIdx in allIdx:
        # The dihedrals starting at this particular Ni atom.
        relDihedrals = [ d for d in dihedrals if d[0] == niIdx ]
        assert len(relDihedrals) == 4
        s = set([d[-1] for d in relDihedrals])
        if s not in alphaList:
            alphaList.append(s)
    r = []
    for l in alphaList:
        r.extend(list(combinations(list(l), 2)))
    return [r]
def checkBonds(mol, bondList, cluster=False):
    """Sanity-check the bond topology of a Ni-MOF style structure.

    mol      -- sequence of atoms exposing a .symbol attribute.
    bondList -- per-atom list of bonded neighbour indices.
    cluster  -- relax the nitrogen rules for cluster models.

    Raises RuntimeError on the first atom whose neighbour symbols or
    coordination count do not match the expected chemistry; returns True
    when every atom passes. Symbols other than C/H/N/O/Ni are accepted
    without any check.
    """
    def _fail(i, atom, neighbours, symbols):
        # Single point for the diagnostic; message kept identical to the
        # historical one so downstream log parsing is unaffected.
        raise RuntimeError("Atom {:} ({:}) bonded to {:}, these are {:}".format(i, atom.symbol, str(neighbours), str(symbols)))

    for i, atom in enumerate(mol):
        neighbours = bondList[i]
        symbols = [mol[n].symbol for n in neighbours]
        symSet = set(symbols)
        count = len(neighbours)
        sym = atom.symbol
        if sym == 'C':
            # Accepted carbon environments: aromatic CH, sp3 C(H)N,
            # pure-carbon junction, carboxylate carbon.
            ok = (
                (symSet == {'C', 'H'} and count == 3)
                or (symSet == {'C', 'H', 'N'} and count == 4)
                or (symSet == {'C'} and count == 3)
                or (symSet == {'C', 'O'} and count == 3)
            )
            if not ok:
                _fail(i, atom, neighbours, symbols)
        elif sym == 'H':
            if (symSet != {'C'} and symSet != {'N'}) or count != 1:
                _fail(i, atom, neighbours, symbols)
        elif sym == 'N':
            if cluster:
                if (symSet != {'C', 'Ni'} and symSet != {'H', 'Ni'} and symSet != {'C'}) or count not in [4, 3]:
                    _fail(i, atom, neighbours, symbols)
            else:
                if (symSet != {'C', 'Ni'} and symSet != {'H', 'Ni'}) or count != 4:
                    _fail(i, atom, neighbours, symbols)
        elif sym == 'O':
            if symSet != {'C', 'Ni'} or count != 2:
                _fail(i, atom, neighbours, symbols)
        elif sym == 'Ni':
            if symSet != {'Ni', 'O', 'N'} or count != 6:
                _fail(i, atom, neighbours, symbols)
    return True
| patrickmelix/Python4ChemistryTools | moffunctions.py | Python | mit | 5,110 |
from lettuce import world
from salad.logger import logger
from splinter.exceptions import ElementDoesNotExist
# Maps a regex fragment used in lettuce step definitions to the splinter
# browser finder method it should invoke. The capture group (.*) carries
# the user-supplied pattern through to the finder.
# NOTE(review): the xpath entry has no capture group and the value entry's
# pattern is unquoted, unlike the others — presumably intentional for the
# step phrasing, but verify against the step definitions that consume this.
ELEMENT_FINDERS = {
    'named "(.*)"': "find_by_name",
    'with(?: the)? id "(.*)"': "find_by_id",
    'with(?: the)? css selector "(.*)"': "find_by_css",
    'with(?: the)? xpath selector': "find_by_xpath",
    'with(?: the)? value (.*)': "find_by_value",
}
# Same idea for hyperlink-specific finders.
LINK_FINDERS = {
    'to "(.*)"': "find_link_by_href",
    'to a url that contains "(.*)"': "find_link_by_partial_href",
    'with(?: the)? text "(.*)"': "find_link_by_text",
    'with text that contains "(.*)"': "find_link_by_partial_text",
}
# Noun alternatives accepted in step sentences for generic elements / links.
ELEMENT_THING_STRING = "(?:element|thing|field|textarea|radio button|button|checkbox|label)"
LINK_THING_STRING = "link"
def _get_element(finder_function, first, last, pattern, expect_not_to_find=False, leave_in_list=False):
    """Find element(s) via the named splinter finder on world.browser.

    finder_function    -- name of a find_by_* method on the browser.
    first / last       -- pick the first/last match instead of the list.
    expect_not_to_find -- when True, a miss is not an error (used by
                          negative assertions).
    leave_in_list      -- keep the ElementList even when it has matches.

    The result is cached on world.current_element and returned.
    Raises ElementDoesNotExist when nothing matched and a match was expected.
    """
    element = getattr(world.browser, finder_function)(pattern)
    try:
        if first:
            element = element.first
        if last:
            element = element.last
        # Heuristic type-name check: only list-like results get pruned here.
        if "WebDriverElement" not in "%s" % type(element):
            if len(element) > 1:
                logger.warn("More than one element found when looking for %s for %s. Using the first one. " % (finder_function, pattern))
            if not leave_in_list:
                element = element.first
    except ElementDoesNotExist:
        if not expect_not_to_find:
            logger.error("Element not found: %s for %s" % (finder_function, pattern))
            # Bug fix: re-raise the caught exception instead of raising the
            # bare class, so the original message and traceback survive.
            raise
    world.current_element = element
    return element
def _convert_pattern_to_css(finder_function, first, last, find_pattern, tag=""):
pattern = ""
if finder_function == "find_by_name":
pattern += "%s[name='%s']" % (tag, find_pattern, )
elif finder_function == "find_by_id":
pattern += "#%s" % (find_pattern, )
elif finder_function == "find_by_css":
pattern += "%s" % (find_pattern, )
elif finder_function == "find_by_value":
pattern += "%s[value='%s']" % (tag, find_pattern, ) # makes no sense, but consistent.
else:
raise Exception("Unknown pattern.")
if first:
pattern += ":first"
if last:
pattern += ":last"
return pattern
| adw0rd/salad-py3 | salad/steps/browser/finders.py | Python | bsd-3-clause | 2,258 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import os.path
__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.dirname(__dir__))
import omnicxx
from omnicxx import GetTagsMgr
from omnicxx import CodeComplete
def _ToList(compl_items):
li = []
for item in compl_items:
li.append(item['word'])
return li
def test00(tagmgr):
    """Completion test cases for test00.cpp (Python 2 script).

    Reads the C++ fixture next to this file, then asks CodeComplete for
    completions at a list of (row, col) positions and compares against the
    expected word lists where one is given.
    """
    # The fixture must contain the A::B tag for the rest to be meaningful.
    assert tagmgr.GetTagsByPath('A::B')
    fname = os.path.join(__dir__, 'test00.cpp')
    with open(fname) as f:
        buff = f.read().splitlines()
    retmsg = {}
    #print _ToList(CodeComplete(fname, buff, 61, 8, tagmgr))
    #print retmsg
    #print _ToList(CodeComplete(fname, buff, 61, 6, tagmgr, retmsg=retmsg))
    #print retmsg
    #print _ToList(CodeComplete(fname, buff, 21, 1, tagmgr, retmsg=retmsg))
    #print retmsg
    # Each case is ([row, col], expected_words); an empty expectation means
    # "just run it and print the result" (no assertion).
    cases = [
        #([79, 18], []),
        ([4, 24], ['a', 'af()']),
        ([61, 6], []),
        ([61, 24], []),
        #([70, 27],[])
        ([79, 18], []),
        ([83, 25], []),
        ([84, 44], []),
        ([90, 19], []),
        ([91, 21], []),
        ([96, 10], []),
        ([97, 8], []),
        ([99, 5], []),
    ]
    for pos, result in cases:
        row = pos[0]
        col = pos[1]
        print '=' * 40
        # Show the source text up to the completion column for context.
        print buff[row-1][: col-1].strip()
        retmsg = {}
        li = _ToList(CodeComplete(fname, buff, row, col, tagmgr, retmsg=retmsg))
        print li
        if result:
            try:
                assert li == result
            except:
                # Echo the mismatch before re-raising so the diff is visible.
                print li, '!=', result
                raise
        print 'retmsg:', retmsg
def test01(tagmgr):
    """Test cases for test01.cpp: the tag database must expose main and INT."""
    #print tagmgr
    assert tagmgr.GetTagsByPath('main')
    assert tagmgr.GetTagsByPath('INT')
def test02(tagmgr):
    """Placeholder for test02.cpp; no assertions yet."""
    pass
def main(argv):
    """Run every testNN(tagmgr) function matching a testNN.* C/C++ fixture.

    Scans this directory for C/C++ sources, and for each file whose name
    starts with 'test' rebuilds an in-memory tag database, parses the file,
    and invokes the module-level function of the same base name.
    """
    files = []
    for item in os.listdir(__dir__):
        fname = os.path.join(__dir__, item)
        if not os.path.isfile(fname):
            continue
        if os.path.splitext(fname)[1] in set(['.c', '.cpp', '.h', '.hpp', '.cxx']):
            files.append(fname)
    tagmgr = GetTagsMgr(':memory:')
    #tagmgr = GetTagsMgr('test.db')
    for fname in files:
        if not os.path.basename(fname).startswith('test'):
            continue
        tagmgr.RecreateDatabase()
        tagmgr.ParseFiles([fname])
        name = os.path.splitext(os.path.basename(fname))[0]
        # Bug fix: look the test function up by name instead of eval()'ing a
        # constructed expression — same dispatch, no code-injection surface.
        globals()[name](tagmgr)
if __name__ == '__main__':
    import sys  # NOTE(review): sys is already imported at module top; redundant here.
    # Treat a None return from main() as success (exit code 0).
    ret = main(sys.argv)
    if ret is None:
        ret = 0
    sys.exit(ret)
| fanhed/omnicxx | test/main.py | Python | gpl-2.0 | 2,547 |
# Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
    """Fold a list of preprocessor conditions into a single expression.

    Returns None for an empty/None list, the lone condition unchanged for a
    singleton, and otherwise the conditions joined with ' && ', each side
    (and the whole) parenthesised when it already contains && or ||.
    """
    if not conditions:
        return None
    if len(conditions) == 1:
        return conditions[0]
    joined = ' && '.join(bracket_if_needed(c) for c in conditions)
    return bracket_if_needed(joined)
def bracket_if_needed(condition):
    """Parenthesise *condition* when it contains a && or || operator."""
    has_binary_op = re.match(r'.*(&&|\|\|).*', condition)
    return '(%s)' % condition if has_binary_op else condition
def parse(file):
    """Parse a WebKit *.messages.in stream into a model.MessageReceiver.

    Recognises the receiver header
        messages -> Name [: Superclass] [attributes] {
    maintains a stack of #if conditions, and parses one message per line:
        Name(params) [-> (reply params)] [attributes]
    """
    receiver_attributes = None
    destination = None
    messages = []
    # Stack of currently open #if conditions (innermost last).
    conditions = []
    # Conditions that were open before the receiver header: they guard the
    # whole receiver rather than individual messages.
    master_condition = None
    superclass = []
    for line in file:
        match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
        if match:
            receiver_attributes = parse_attributes_string(match.group('attributes'))
            if match.group('superclass'):
                superclass = match.group('superclass')
            if conditions:
                master_condition = conditions
                conditions = []
            destination = match.group('destination')
            continue
        if line.startswith('#'):
            trimmed = line.rstrip()
            if line.startswith('#if '):
                conditions.append(trimmed[4:])
            elif line.startswith('#endif') and conditions:
                conditions.pop()
            elif line.startswith('#else') or line.startswith('#elif'):
                # Branching preprocessor logic cannot be represented here.
                raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
            continue
        match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
        if match:
            name, parameters_string, reply_parameters_string, attributes_string = match.groups()
            if parameters_string:
                parameters = parse_parameters_string(parameters_string)
                for parameter in parameters:
                    parameter.condition = combine_condition(conditions)
            else:
                parameters = []
            attributes = parse_attributes_string(attributes_string)
            if reply_parameters_string:
                reply_parameters = parse_parameters_string(reply_parameters_string)
                for reply_parameter in reply_parameters:
                    reply_parameter.condition = combine_condition(conditions)
            elif reply_parameters_string == '':
                # "-> ()" means a reply with no arguments...
                reply_parameters = []
            else:
                # ...while no "->" clause at all means no reply.
                reply_parameters = None
            messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
    return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition))
def parse_attributes_string(attributes_string):
    """Split a whitespace-separated attribute string; None/empty yields None."""
    return attributes_string.split() if attributes_string else None
def split_parameters_string(parameters_string):
    """Split a comma-separated parameter list on top-level commas only.

    Commas nested inside angle brackets (template argument lists) are kept
    within their parameter. Whitespace around parameters is preserved.
    """
    parts = []
    depth = 0
    start = 0
    for i, ch in enumerate(parameters_string):
        if ch == '<':
            depth += 1
        elif ch == '>':
            depth -= 1
        elif ch == ',' and depth == 0:
            parts.append(parameters_string[start:i])
            start = i + 1
    parts.append(parameters_string[start:])
    return parts
def parse_parameters_string(parameters_string):
    """Parse "[attrs] type name, ..." into a list of model.Parameter objects.

    A leading "struct " on the type switches the parameter kind from
    'class' (the default) to 'struct' and is stripped from the type.
    """
    parsed = []
    for raw in split_parameters_string(parameters_string):
        match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', raw)
        attributes_string = match.group('attributes')
        type_and_name_string = match.group('type_and_name')
        pieces = type_and_name_string.rsplit(' ', 1)
        kind = 'class'
        if pieces[0].startswith('struct '):
            kind = 'struct'
            pieces[0] = pieces[0][7:]
        parsed.append(model.Parameter(
            kind=kind,
            type=pieces[0],
            name=pieces[1],
            attributes=parse_attributes_string(attributes_string),
        ))
    return parsed
| annulen/qtwebkit-snapshots | Source/WebKit2/Scripts/webkit/parser.py | Python | lgpl-2.1 | 5,618 |
class Coupon:
    """Simple value object describing a coupon offer."""

    def __init__(self, coupon_name, date, number_of_coupons):
        # Display name of the coupon.
        self.coupon_name = coupon_name
        # Associated date (format not enforced here).
        self.date = date
        # How many coupons are available.
        self.number_of_coupons = number_of_coupons

    def __repr__(self):
        # Debug-friendly representation; added for easier test/log output.
        return "Coupon(coupon_name=%r, date=%r, number_of_coupons=%r)" % (
            self.coupon_name, self.date, self.number_of_coupons)
"""Change custom form terms to text.
Revision ID: 68423db114cd
Revises: 7135c4a2339e
Create Date: 2018-09-19 21:29:08.800064
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# revision identifiers, used by Alembic.
revision = '68423db114cd'
down_revision = '7135c4a2339e'
# Minimal stand-in for a Flask-SQLAlchemy style "db" object so model-like
# code (db.Model / db.relationship / db.session) works inside this migration
# without importing the application.
Base = declarative_base()
db = sa
db.Model = Base
db.relationship = relationship
def create_session():
    """Bind a fresh SQLAlchemy session to Alembic's migration connection
    and expose it as db.session (mimicking Flask-SQLAlchemy)."""
    bind = op.get_bind()
    session = sa.orm.sessionmaker()(bind=bind)
    db.session = session
def upgrade():
    """Widen custom_form.terms from String to Text."""
    create_session()
    op.alter_column('custom_form', 'terms', type_=sa.Text,
                    existing_type=sa.String)
    # (removed a dead trailing `pass` statement)
def downgrade():
    """Revert custom_form.terms from Text back to String."""
    create_session()
    op.alter_column('custom_form', 'terms', type_=sa.String,
                    existing_type=sa.Text)
    # (removed a dead trailing `pass` statement)
# vim: ft=python
| viaict/viaduct | migrations/versions/2018_09_19_68423db114cd_change_custom_form_terms_to_text.py | Python | mit | 961 |
from __future__ import print_function
# Time: O(n)
# Space: O(1)
#
# Given a linked list, swap every two adjacent nodes and return its head.
#
# For example,
# Given 1->2->3->4, you should return the list as 2->1->4->3.
#
# Your algorithm should use only constant space.
# You may not modify the values in the list, only nodes itself can be changed.
#
# Definition for singly-linked list.
class ListNode:
    """Singly linked list node holding a value and a next pointer."""

    def __init__(self, x):
        self.val = x
        self.next = None

    def __repr__(self):
        # Renders the whole chain recursively, e.g. "1 -> 2 -> None".
        return "{} -> {}".format(self.val, self.next)
class Solution:
    """Swap every two adjacent nodes of a singly linked list."""

    # @param a ListNode
    # @return a ListNode
    def swapPairs(self, head):
        """Return the list with each adjacent pair of nodes swapped.

        Relinks pointers in place (node values are never copied);
        O(n) time, O(1) extra space.
        """
        sentinel = ListNode(0)
        sentinel.next = head
        prev = sentinel
        while prev.next is not None and prev.next.next is not None:
            first = prev.next
            second = first.next
            rest = second.next
            prev.next = second
            second.next = first
            first.next = rest
            prev = first
        return sentinel.next
if __name__ == "__main__":
    # Build the list 1 -> 2 -> 3 -> 4 and print the pair-swapped result.
    head = ListNode(1)
    head.next, head.next.next, head.next.next.next = ListNode(2), ListNode(3), ListNode(4)
    print(Solution().swapPairs(head))
| kamyu104/LeetCode | Python/swap-nodes-in-pairs.py | Python | mit | 1,255 |
#!/usr/bin/env python
# Entry file for search article with user's key word.
import sys
import json
import cgi
from article_engin import get_articles
from connect_database import getDatabase
# CGI entry point: read the request form, look up matching articles, and
# answer with a JSON document on stdout.
fs = cgi.FieldStorage()
# Emit the CGI header (content type + blank line) before any payload.
sys.stdout.write("Content-Type: application/json")
sys.stdout.write("\n")
sys.stdout.write("\n")
result = {}
result['success'] = True
# Concatenate all form field values into one string.
data = ''
for k in fs.keys():
    data+=fs.getvalue(k)
#get_articles(eval(data))
# SECURITY(review): eval() on request-supplied data executes arbitrary
# Python — this should be replaced with json.loads or ast.literal_eval.
titles = get_articles(eval(data))
article_data=[]
client = getDatabase()
db = client.lyl
# Parallel lists: index i of each list describes the same article.
result["title"]=[]
result["url"]=[]
result["tags"]=[]
result["section"]=[]
for title in titles:
    # assumes every title returned by get_articles exists in link_content —
    # TODO confirm; a miss would make article None and raise here.
    article = db.link_content.find_one({"title":title})
    result["title"] += [article["title"]]
    result["url"] += [article["url"]]
    result["tags"] += [article["tags"]]
    result["section"] += [article["section"]]
client.close()
sys.stdout.write(json.dumps(result,indent=1))
sys.stdout.write("\n")
sys.stdout.close() | leaot/Huffpost-Articles-in-Twitter-Trends | Python/user_articles_entry.py | Python | mit | 946 |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
# Prefer setuptools; fall back to distutils on minimal environments.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Version is sourced from the package itself so it is defined in one place.
import sight_reading
version = sight_reading.__version__
setup(
    name='Sight Reading',
    version=version,
    author='',
    author_email='[email protected]',
    packages=[
        'sight_reading',
    ],
    include_package_data=True,
    install_requires=[
        'Django>=1.6.1',
    ],
    zip_safe=False,
    scripts=['sight_reading/manage.py'],
) | v-for-vincent/sight-reading | setup.py | Python | bsd-3-clause | 529 |
import tables
import numpy as np
from utils.coords import local2eq
# Convert pruned IC86 upgoing events into a compact numpy record array.
# NOTE(review): mixes py3-style print() with a py2 print statement below,
# and uses deprecated APIs (tables.openFile, np.int/np.float) — runs only
# on old PyTables/NumPy under Python 2.
print("IC86")
# 5 degrees in radians: angular-error acceptance threshold.
deg5 = np.radians(5.)
# Energy-dependent pull correction polynomial applied to paraboloid sigma
# (X is log10(E); coefficients presumably from a dedicated fit — verify).
pull = lambda X: (79. - 86.7 * X + 38.45 * X**2 - 8.673 * X**3
                  + 1.056 * X**4 - 0.0658 * X**5 + 0.00165 * X**6)
hdf = tables.openFile("./PrunedDataUpgoing.hd5")
f = hdf.root
data = f.SplineMPEParaboloidFitParams.cols
# Paraboloid fit succeeded when both error axes are non-negative.
pbf_status = ((data.err1[:] >= 0)&(data.err2[:] >= 0))
# Combine the two error axes into a single circularised sigma.
paraboloid_sigma = np.sqrt(data.err1[:]**2 + data.err2[:]**2) / np.sqrt(2)
print("\t\t{0:7.2%} Paraboloid OK".format(
    np.sum(pbf_status, dtype=np.float) / len(pbf_status)))
pbf_status2 = data.status[:] >= 0
print("\t\t{0:7.2%} Paraboloid not complete fail".format(
    np.sum(pbf_status2, dtype=np.float) / len(pbf_status2)))
# Output record layout; only events passing pbf_status are kept.
arr = np.empty((np.sum(pbf_status), ), dtype=[("run", np.int),
                                              ("event", np.int),
                                              ("ra", np.float),
                                              ("dec", np.float),
                                              ("azimuth",np.float),
                                              ("zenith",np.float),
                                              ("logE", np.float),
                                              ("sigma", np.float),
                                              ("time", np.float)])
arr["run"] = f.I3EventHeader.cols.Run[:][pbf_status]
arr["event"] = f.I3EventHeader.cols.Event[:][pbf_status]
data = f.SplineMPE.cols
zen = data.zenith[:][pbf_status]
phi = data.azimuth[:][pbf_status]
arr["azimuth"] = data.azimuth[:][pbf_status]
arr["zenith"] = data.zenith[:][pbf_status]
data = f
arr["time"] = data.timeMJD.cols.value[:][pbf_status]
# Convert local (zenith, azimuth) at the event MJD to equatorial coords.
arr["ra"], arr["dec"] = local2eq(zen, phi, arr["time"])
data = f.SplineMPEMuEXDifferential.cols
arr["logE"] = np.log10(data.energy[:][pbf_status])
# Apply the energy-dependent pull correction to the raw paraboloid sigma.
arr["sigma"] = paraboloid_sigma[:][pbf_status] * pull(arr["logE"])
# Keep only events with corrected angular error below 5 degrees.
sigma_status = arr["sigma"] < deg5
print("\t5deg threshold: {0:7.2%}".format(
    np.sum(sigma_status, dtype=np.float) / len(sigma_status)))
arr = arr[sigma_status]
print "Paraboloid:", np.degrees(np.percentile(arr["sigma"],
                                              [0., 50., 90., 95., 100.]))
hdf.close()
print("\t{0:6d} events".format(len(arr)))
np.save("IC86_exp.npy", arr)
| thejevans/pointSourceAnalysis | oldConvertH5.py | Python | gpl-3.0 | 2,280 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import json
import os
import re
import unittest
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, modify_settings,
override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix, patch_logger
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Article, BarAccount, Book, Bookmark,
Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice,
City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle,
CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, ExternalSubscriber,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, ModelWithStringPrimaryKey, OtherStory, Paper, Parent,
ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture,
Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, Recommendation, Recommender,
RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant,
RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage,
Simple, State, Story, Subscriber, SuperSecretHideout, SuperVillain,
Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject,
UnorderedObject, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
class AdminFieldExtractionMixin(object):
    """
    Helper methods for extracting data from AdminForm.
    """
    def get_admin_form_fields(self, response):
        """
        Return a list of AdminFields for the AdminForm in the response.
        """
        fields = []
        # Flatten AdminForm -> fieldsets -> field lines -> fields.
        for fieldset in response.context['adminform']:
            for field_line in fieldset:
                fields.extend(field_line)
        return fields

    def get_admin_readonly_fields(self, response):
        """
        Return the readonly fields for the response's AdminForm.
        """
        return [f for f in self.get_admin_form_fields(response) if f.is_readonly]

    def get_admin_readonly_field(self, response, field_name):
        """
        Return the readonly field for the given field_name (None if absent).
        """
        for field in self.get_admin_readonly_fields(response):
            if field.field['name'] == field_name:
                return field
@override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixture base for the basic admin-view tests: one superuser,
    one Section with three Articles of known dates/content, and assorted
    reference objects used by the sorting/filtering tests."""
    @classmethod
    def setUpTestData(cls):
        # Created once per test class; individual tests must not mutate.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = Section.objects.create(name='Test section')
        # Three articles with distinct dates so date ordering is testable.
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.color1 = Color.objects.create(value='Red', warm=True)
        cls.color2 = Color.objects.create(value='Orange', warm=True)
        cls.color3 = Color.objects.create(value='Blue', warm=False)
        cls.color4 = Color.objects.create(value='Green', warm=False)
        cls.fab1 = Fabric.objects.create(surface='x')
        cls.fab2 = Fabric.objects.create(surface='y')
        cls.fab3 = Fabric.objects.create(surface='plain')
        cls.b1 = Book.objects.create(name='Book 1')
        cls.b2 = Book.objects.create(name='Book 2')
        cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
        # NOTE(review): reassigns cls.pro1 — the second promo was probably
        # meant to be cls.pro2; verify no test relies on the first object.
        cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2)
        cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
        cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
        cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
        cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
        cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
        # Post data for edit inline
        cls.inline_post_data = {
            "name": "Test section",
            # inline data
            "article_set-TOTAL_FORMS": "6",
            "article_set-INITIAL_FORMS": "3",
            "article_set-MAX_NUM_FORMS": "0",
            "article_set-0-id": cls.a1.pk,
            # there is no title in database, give one here or formset will fail.
            "article_set-0-title": "Norske bostaver æøå skaper problemer",
            "article_set-0-content": "<p>Middle content</p>",
            "article_set-0-date_0": "2008-03-18",
            "article_set-0-date_1": "11:54:58",
            "article_set-0-section": cls.s1.pk,
            "article_set-1-id": cls.a2.pk,
            "article_set-1-title": "Need a title.",
            "article_set-1-content": "<p>Oldest content</p>",
            "article_set-1-date_0": "2000-03-18",
            "article_set-1-date_1": "11:54:58",
            "article_set-2-id": cls.a3.pk,
            "article_set-2-title": "Need a title.",
            "article_set-2-content": "<p>Newest content</p>",
            "article_set-2-date_0": "2009-03-18",
            "article_set-2-date_1": "11:54:58",
            "article_set-3-id": "",
            "article_set-3-title": "",
            "article_set-3-content": "",
            "article_set-3-date_0": "",
            "article_set-3-date_1": "",
            "article_set-4-id": "",
            "article_set-4-title": "",
            "article_set-4-content": "",
            "article_set-4-date_0": "",
            "article_set-4-date_1": "",
            "article_set-5-id": "",
            "article_set-5-title": "",
            "article_set-5-content": "",
            "article_set-5-date_0": "",
            "article_set-5-date_1": "",
        }
    def setUp(self):
        # Log every test in as the shared superuser.
        self.client.force_login(self.superuser)
    def tearDown(self):
        # Reset cached locale formats changed by individual tests.
        formats.reset_format_cache()
    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertLess(
            response.content.index(force_bytes(text1)),
            response.content.index(force_bytes(text2)),
            (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset)
        )
class AdminViewBasicTest(AdminViewBasicTestCase):
    def test_trailing_slash_required(self):
        """
        If you leave off the trailing slash, app should redirect and add it.
        """
        add_url = reverse('admin:admin_views_article_add')
        response = self.client.get(add_url[:-1])
        # 301: the slash-appending redirect is permanent.
        self.assertRedirects(response, add_url, status_code=301)
    def test_admin_static_template_tag(self):
        """
        Test that admin_static.static is pointing to the collectstatic version
        (as django.contrib.collectstatic is in installed apps).
        """
        old_url = staticfiles_storage.base_url
        staticfiles_storage.base_url = '/test/'
        try:
            self.assertEqual(static('path'), '/test/path')
        finally:
            # Always restore the global storage URL for later tests.
            staticfiles_storage.base_url = old_url
    def test_basic_add_GET(self):
        """
        A smoke test to ensure GET on the add_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_add_with_GET_args(self):
        """GET parameters on the add view pre-populate the form fields."""
        response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
        self.assertContains(
            response, 'value="My Section"',
            msg_prefix="Couldn't find an input with the right value in the response"
        )
    def test_basic_edit_GET(self):
        """
        A smoke test to ensure GET on the change_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_basic_edit_GET_string_PK(self):
        """
        Ensure GET on the change_view works (returns an HTTP 404 error, see
        #11191) when passing a string as the PK argument for a model with an
        integer PK field.
        """
        response = self.client.get(reverse('admin:admin_views_section_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
    def test_basic_edit_GET_old_url_redirect(self):
        """
        The change URL changed in Django 1.9, but the old one still redirects.
        """
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
        )
        self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
    def test_basic_inheritance_GET_string_PK(self):
        """
        Ensure GET on the change_view works on inherited models (returns an
        HTTP 404 error, see #19951) when passing a string as the PK argument
        for a model with an integer PK field.
        """
        response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
    def test_basic_add_POST(self):
        """
        A smoke test to ensure POST on add_view works.
        """
        post_data = {
            "name": "Another Section",
            # inline data
            "article_set-TOTAL_FORMS": "3",
            "article_set-INITIAL_FORMS": "0",
            "article_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def test_popup_add_POST(self):
        """
        Ensure http response from a popup is properly escaped.
        """
        post_data = {
            '_popup': '1',
            'title': 'title with a new\nline',
            'content': 'some content',
            'date_0': '2010-09-10',
            'date_1': '14:55:39',
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
        # The newline must come back escaped for the popup's JavaScript.
        self.assertContains(response, 'title with a new\\nline')
    def test_basic_edit_POST(self):
        """
        A smoke test to ensure POST on edit_view works.
        """
        url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
        response = self.client.post(url, self.inline_post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def test_edit_save_as(self):
        """
        Test "save as".
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-3-section": "1",
            "article_set-4-section": "1",
            "article_set-5-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def test_edit_save_as_delete_inline(self):
        """
        Should be able to "Save as new" while also deleting an inline.
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-2-DELETE": "1",
            "article_set-3-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)
        # started with 3 articles, one was deleted.
        self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
# callables display the callable name.
self.assertContains(response, 'column-callable_year')
self.assertContains(response, 'field-callable_year')
# lambdas display as "lambda" + index that they appear in list_display.
self.assertContains(response, 'column-lambda8')
self.assertContains(response, 'field-lambda8')
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order."
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
self.assertContentBefore(
response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order."
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(
response, '2009', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2000',
"Results of sorting on ModelAdmin method are out of order."
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(
response, '2000', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2009',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_multiple(self):
    """Sorting on several columns at once via a '.'-separated 'o' parameter."""
    p1 = Person.objects.create(name="Chris", gender=1, alive=True)
    p2 = Person.objects.create(name="Chris", gender=2, alive=True)
    p3 = Person.objects.create(name="Bob", gender=1, alive=True)
    link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
    link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
    link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
    # Sort by name, gender
    response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
    self.assertContentBefore(response, link3, link1)
    self.assertContentBefore(response, link1, link2)
    # Sort by gender descending, name
    response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
    self.assertContentBefore(response, link2, link3)
    self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
    """
    If no ordering is defined in `ModelAdmin.ordering` or in the query
    string, then the underlying order of the queryset should not be
    changed, even if it is defined in `Modeladmin.get_queryset()`.
    Refs #11868, #7309.
    """
    p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
    p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
    p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
    link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
    link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
    link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
    # No 'o' parameter: the get_queryset() ordering must survive untouched.
    response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
    self.assertContentBefore(response, link3, link2)
    self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
    """Default ordering from Model.Meta is respected and query-string overridable."""
    urdu = Language.objects.create(iso='ur', name='Urdu')
    arabic = Language.objects.create(iso='ar', name='Arabic')
    urdu_link = reverse('admin:admin_views_language_change', args=(quote(urdu.pk),))
    arabic_link = reverse('admin:admin_views_language_change', args=(quote(arabic.pk),))
    changelist_url = reverse('admin:admin_views_language_changelist')
    # Meta ordering puts Arabic before Urdu.
    response = self.client.get(changelist_url, {})
    self.assertContentBefore(response, arabic_link, urdu_link)
    # A query-string ordering parameter overrides the Meta ordering.
    response = self.client.get(changelist_url, {'o': '-1'})
    self.assertContentBefore(response, urdu_link, arabic_link)
def test_change_list_sorting_override_model_admin(self):
    """ModelAdmin.ordering takes precedence over Model.Meta ordering."""
    now = datetime.datetime.now()
    newer = Podcast.objects.create(name="A", release_date=now)
    older = Podcast.objects.create(name="B", release_date=now - datetime.timedelta(10))
    newer_link = reverse('admin:admin_views_podcast_change', args=(newer.pk,))
    older_link = reverse('admin:admin_views_podcast_change', args=(older.pk,))
    response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
    self.assertContentBefore(response, newer_link, older_link)
def test_multiple_sort_same_field(self):
    """Two list_display columns backed by the same ordering field coexist."""
    # Check that we get the columns we expect if we have two columns
    # that correspond to the same ordering field
    dt = datetime.datetime.now()
    p1 = Podcast.objects.create(name="A", release_date=dt)
    p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
    link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
    link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
    response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
    self.assertContentBefore(response, link1, link2)
    p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
    p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
    link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
    link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
    response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
    # Should have 5 columns (including action checkbox col)
    self.assertContains(response, '<th scope="col"', count=5)
    self.assertContains(response, 'Name')
    self.assertContains(response, 'Colored name')
    # Check order
    self.assertContentBefore(response, 'Name', 'Colored name')
    # Check sorting - should be by name
    self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
    """
    Ensures that the admin shows default sort indicators for all
    kinds of 'ordering' fields: field names, method on the model
    admin and model itself, and other callables. See #17252.
    """
    # Each pair is (model class, url slug for its changelist).
    models = [(AdminOrderedField, 'adminorderedfield'),
              (AdminOrderedModelMethod, 'adminorderedmodelmethod'),
              (AdminOrderedAdminMethod, 'adminorderedadminmethod'),
              (AdminOrderedCallable, 'adminorderedcallable')]
    for model, url in models:
        # Create out of order so the test can't pass by insertion order alone.
        model.objects.create(stuff='The Last Item', order=3)
        model.objects.create(stuff='The First Item', order=1)
        model.objects.create(stuff='The Middle Item', order=2)
        response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
        self.assertEqual(response.status_code, 200)
        # Should have 3 columns including action checkbox col.
        self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
        # Check if the correct column was selected. 2 is the index of the
        # 'order' column in the model admin's 'list_display' with 0 being
        # the implicit 'action_checkbox' and 1 being the column 'stuff'.
        self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
        # Check order of records.
        self.assertContentBefore(response, 'The First Item', 'The Middle Item')
        self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_has_related_field_in_list_display(self):
    """Joins shouldn't be performed for <FK>_id fields in list display."""
    state = State.objects.create(name='Karnataka')
    City.objects.create(state=state, name='Bangalore')
    response = self.client.get(reverse('admin:admin_views_city_changelist'), {})
    # Mutate the ChangeList on the response to probe both cases directly.
    response.context['cl'].list_display = ['id', 'name', 'state']
    self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
    # The raw '<FK>_id' attribute must not count as a related field.
    response.context['cl'].list_display = ['id', 'name', 'state_id']
    self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_limited_filter(self):
    """Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
    This also tests relation-spanning filters (e.g. 'color__value').
    """
    response = self.client.get(reverse('admin:admin_views_thing_changelist'))
    self.assertContains(
        response, '<div id="changelist-filter">',
        msg_prefix="Expected filter not found in changelist view"
    )
    # 'Blue' is excluded by limit_choices_to, so it must not be a filter option.
    self.assertNotContains(
        response, '<a href="?color__id__exact=3">Blue</a>',
        msg_prefix="Changelist filter not correctly limited by limit_choices_to"
    )
def test_relation_spanning_filters(self):
    """Filters spanning FK relations (e.g. 'chap__book__name') render and work."""
    changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
    response = self.client.get(changelist_url)
    self.assertContains(response, '<div id="changelist-filter">')
    # Map each filter lookup to the values to try and a predicate verifying
    # that a returned object actually matches the filtered value.
    filters = {
        'chap__id__exact': dict(
            values=[c.id for c in Chapter.objects.all()],
            test=lambda obj, value: obj.chap.id == value),
        'chap__title': dict(
            values=[c.title for c in Chapter.objects.all()],
            test=lambda obj, value: obj.chap.title == value),
        'chap__book__id__exact': dict(
            values=[b.id for b in Book.objects.all()],
            test=lambda obj, value: obj.chap.book.id == value),
        'chap__book__name': dict(
            values=[b.name for b in Book.objects.all()],
            test=lambda obj, value: obj.chap.book.name == value),
        'chap__book__promo__id__exact': dict(
            values=[p.id for p in Promo.objects.all()],
            test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
        'chap__book__promo__name': dict(
            values=[p.name for p in Promo.objects.all()],
            test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
    }
    for filter_path, params in filters.items():
        for value in params['values']:
            query_string = urlencode({filter_path: value})
            # ensure filter link exists
            self.assertContains(response, '<a href="?%s"' % query_string)
            # ensure link works
            filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
            self.assertEqual(filtered_response.status_code, 200)
            # ensure changelist contains only valid objects
            for obj in filtered_response.context['cl'].queryset.all():
                self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
    """Ensure incorrect lookup parameters are handled gracefully."""
    changelist_url = reverse('admin:admin_views_thing_changelist')
    # Invalid lookups redirect back to the changelist with '?e=1' (error flag).
    response = self.client.get(changelist_url, {'notarealfield': '5'})
    self.assertRedirects(response, '%s?e=1' % changelist_url)
    # Spanning relationships through a nonexistent related object (Refs #16716)
    response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
    self.assertRedirects(response, '%s?e=1' % changelist_url)
    # A value of the wrong type for a valid field is also handled gracefully.
    response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
    self.assertRedirects(response, '%s?e=1' % changelist_url)
    # Regression test for #18530
    response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
    self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
    """Ensure is_null is handled correctly."""
    Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
    changelist_url = reverse('admin:admin_views_article_changelist')
    # Unfiltered: all four articles are listed.
    response = self.client.get(changelist_url)
    self.assertContains(response, '4 articles')
    # Both 'false' and '0' select only articles that have a section.
    for falsy in ('false', '0'):
        response = self.client.get(changelist_url, {'section__isnull': falsy})
        self.assertContains(response, '3 articles')
    # Both 'true' and '1' select the single section-less article.
    for truthy in ('true', '1'):
        response = self.client.get(changelist_url, {'section__isnull': truthy})
        self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
    """The changelist page links to the logout and password-change views."""
    response = self.client.get(reverse('admin:admin_views_article_changelist'))
    for url_name in ('admin:logout', 'admin:password_change'):
        self.assertContains(response, '<a href="%s">' % reverse(url_name))
def test_named_group_field_choices_change_list(self):
    """
    Ensures the admin changelist shows correct values in the relevant column
    for rows corresponding to instances of a model in which a named group
    has been used in the choices option of a field.
    """
    link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
    link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
    response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
    fail_msg = (
        "Changelist table isn't showing the right human-readable values "
        "set by a model field 'choices' option named group."
    )
    # The human-readable labels, not the stored values, must be displayed.
    self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
    self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
    """
    Ensures the filter UI shows correctly when at least one named group has
    been used in the choices option of a model field.
    """
    response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
    fail_msg = (
        "Changelist filter isn't showing options contained inside a model "
        "field 'choices' option named group."
    )
    self.assertContains(response, '<div id="changelist-filter">')
    # Filter links use the stored value ('x'/'y') but show the readable label.
    self.assertContains(
        response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>',
        msg_prefix=fail_msg, html=True
    )
    self.assertContains(
        response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>',
        msg_prefix=fail_msg, html=True
    )
def test_change_list_null_boolean_display(self):
    """A NullBooleanField value of None renders the 'unknown' icon."""
    Post.objects.create(public=None)
    response = self.client.get(reverse('admin:admin_views_post_changelist'))
    self.assertContains(response, 'icon-unknown.svg')
def test_i18n_language_non_english_default(self):
    """
    Check if the JavaScript i18n view returns an empty language catalog
    if the default language is non-English but the selected language
    is English. See #13388 and #3594 for more details.
    """
    # Default French, active English: no French strings should leak through.
    with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
        response = self.client.get(reverse('admin:jsi18n'))
        self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
    """
    Makes sure that the fallback language is still working properly
    in cases where the selected language cannot be found.
    """
    # 'none' is not a real language code, so the LANGUAGE_CODE fallback applies.
    with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
        response = self.client.get(reverse('admin:jsi18n'))
        self.assertContains(response, 'Choisir une heure')
def test_jsi18n_with_context(self):
    """The jsi18n view of an admin site configured with extra context renders."""
    url = reverse('admin-extra-context:jsi18n')
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
def test_L10N_deactivated(self):
    """
    Check if L10N is deactivated, the JavaScript i18n view doesn't
    return localized date/time formats. Refs #14824.
    """
    with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
        response = self.client.get(reverse('admin:jsi18n'))
        # Russian (localized) format must be absent; ISO-style format present.
        self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
        self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
    """Lookups not whitelisted in list_filter are rejected with a 400 and logged."""
    with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
        response = self.client.get(
            "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
        )
    self.assertEqual(response.status_code, 400)
    self.assertEqual(len(calls), 1)
    # Filters are allowed if explicitly included in list_filter
    response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
    self.assertEqual(response.status_code, 200)
    response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
    self.assertEqual(response.status_code, 200)
    # Filters should be allowed if they involve a local field without the
    # need to whitelist them in list_filter or date_hierarchy.
    response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
    self.assertEqual(response.status_code, 200)
    e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
    e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
    # The parent-pointer filter rendered by the changelist must itself be usable.
    response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
    self.assertContains(response, 'employee__person_ptr__exact')
    response = self.client.get("%s?employee__person_ptr__exact=%d" % (
        reverse('admin:admin_views_workhour_changelist'), e1.pk)
    )
    self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
    """TO_FIELD_VAR values not referenced by a registered relation are rejected (400)."""
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_changelist')
        response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Specifying a field that is not referred by any other model registered
    # to this admin site should raise an exception.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
    response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
    response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
    self.assertEqual(response.status_code, 200)
    # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
    response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
    self.assertEqual(response.status_code, 200)
    # #23329 - Specifying a field that is not referred by any other model directly registered
    # to this admin site but registered through inheritance should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #23431 - Specifying a field that is only referred to by a inline of a registered
    # model should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #25622 - Specifying a field of a model only referred by a generic
    # relation should raise DisallowedModelAdminToField.
    url = reverse('admin:admin_views_referencedbygenrel_changelist')
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # We also want to prevent the add, change, and delete views from
    # leaking a disallowed field value.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    section = Section.objects.create()
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_change', args=(section.pk,))
        response = self.client.post(url, {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_delete', args=(section.pk,))
        response = self.client.post(url, {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
    """
    Regression test for ticket 15103 - filtering on fields defined in a
    ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
    can break.
    """
    # Filters should be allowed if they are defined on a ForeignKey pointing to this model
    changelist = reverse('admin:admin_views_inquisition_changelist')
    response = self.client.get("%s?leader__name=Palin&leader__age=27" % changelist)
    self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
    """
    Regression test for ticket 20664 - ensure the pk is properly quoted.
    """
    actor = Actor.objects.create(name="Palin", age=27)
    changelist = reverse('admin:admin_views_actor_changelist')
    response = self.client.get("%s?%s" % (changelist, IS_POPUP_VAR))
    self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
    """
    Tests if the "change password" link in the admin is hidden if the User
    does not have a usable password set.
    (against 9bea85795705d015cdadc82c68b99196a8554f5c)
    """
    user = User.objects.get(username='super')
    user.set_unusable_password()
    user.save()
    # Re-login so the session reflects the unusable password.
    self.client.force_login(user)
    response = self.client.get(reverse('admin:index'))
    self.assertNotContains(
        response, reverse('admin:password_change'),
        msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.'
    )
def test_change_view_with_show_delete_extra_context(self):
    """
    The 'show_delete' extra context variable controls whether the change
    view displays the delete button. Refs #10057.
    """
    obj = UndeletableObject.objects.create(name='foo')
    change_url = reverse('admin:admin_views_undeletableobject_change', args=(obj.pk,))
    response = self.client.get(change_url)
    self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
    """
    Ensure that AttributeErrors are allowed to bubble when raised inside
    a change list view.
    Requires a model to be created so there's something to be displayed
    Refs: #16655, #18593, and #18747
    """
    # SimpleAdmin's changelist raises AttributeError when rendering a row.
    Simple.objects.create()
    with self.assertRaises(AttributeError):
        self.client.get(reverse('admin:admin_views_simple_changelist'))
def test_changelist_with_no_change_url(self):
    """
    ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
    for change_view is removed from get_urls
    Regression test for #20934
    """
    UnchangeableObject.objects.create()
    response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
    self.assertEqual(response.status_code, 200)
    # Check the format of the shown object -- shouldn't contain a change link
    self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
    """
    #21056 -- URL reversing shouldn't work for nonexistent apps.
    """
    good_url = '/test_admin/admin/admin_views/'
    confirm_good_url = reverse('admin:app_list',
                               kwargs={'app_label': 'admin_views'})
    self.assertEqual(good_url, confirm_good_url)
    # Unknown app labels must fail to reverse, via kwargs or positional args.
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', args=('admin_views2',))
def test_resolve_admin_views(self):
    """Resolved admin views expose their admin_site / model_admin attributes."""
    index_match = resolve('/test_admin/admin4/')
    self.assertIs(index_match.func.admin_site, customadmin.simple_site)
    list_match = resolve('/test_admin/admin4/auth/user/')
    self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
def test_adminsite_display_site_url(self):
    """
    #13749 - Admin should display link to front-end site 'View site'
    """
    response = self.client.get(reverse('admin:index'))
    self.assertEqual(response.context['site_url'], '/my-site-url/')
    self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's and the shared tests templates dirs in DIRS to take precedence
    # over the admin's templates dir.
    'DIRS': [
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
        os.path.join(os.path.dirname(os.path.dirname(upath(__file__))), 'templates'),
    ],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """Admin views rendered through project-level template overrides."""

    def test_custom_model_admin_templates(self):
        """Custom add/change/delete/history templates on a ModelAdmin are used."""
        # Test custom change list template with custom extra context
        response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
        self.assertContains(response, "var hello = 'Hello!';")
        self.assertTemplateUsed(response, 'custom_admin/change_list.html')
        # Test custom add form template
        response = self.client.get(reverse('admin:admin_views_customarticle_add'))
        self.assertTemplateUsed(response, 'custom_admin/add_form.html')
        # Add an article so we can test delete, change, and history views
        post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39'
        })
        self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
        self.assertEqual(CustomArticle.objects.all().count(), 1)
        article_pk = CustomArticle.objects.all()[0].pk
        # Test custom delete, change, and object history templates
        # Test custom change form template
        response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/change_form.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
        response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': ['1'],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/object_history.html')

    def test_extended_bodyclass_template_change_form(self):
        """
        Ensure that the admin/change_form.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_change_password_template(self):
        """The change-password page extends bodyclass and embeds the username."""
        user = User.objects.get(username='super')
        response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
        # The auth/user/change_password.html template uses super in the
        # bodyclass block.
        self.assertContains(response, 'bodyclass_consistency_check ')
        # When a site has multiple passwords in the browser's password manager,
        # a browser pop up asks which user the new password is for. To prevent
        # this, the username is added to the change password form.
        self.assertContains(response, '<input type="text" name="username" value="super" style="display: none" />')

    def test_extended_bodyclass_template_index(self):
        """
        Ensure that the admin/index.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_change_list(self):
        """
        Ensure that the admin/change_list.html' template uses block.super
        in the bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_login(self):
        """
        Ensure that the admin/login.html template uses block.super in the
        bodyclass block.
        """
        self.client.logout()
        response = self.client.get(reverse('admin:login'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_confirmation(self):
        """
        Ensure that the admin/delete_confirmation.html template uses
        block.super in the bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """
        Ensure that the admin/delete_selected_confirmation.html template uses
        block.super in bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewFormUrlTest(TestCase):
    """Tests around form_url / initial-data handling of the 'admin3' site."""

    # Namespace of the admin site instance under test.
    current_app = "admin3"

    @classmethod
    def setUpTestData(cls):
        # One superuser, a section with three articles, and a prepopulated post.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        # Every test runs authenticated as the superuser.
        self.client.force_login(self.superuser)

    def test_change_form_URL_has_correct_value(self):
        """
        Tests whether change_view has form_url in response.context
        """
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
        )
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        self.assertEqual(response.context['form_url'], 'pony')

    def test_initial_data_can_be_overridden(self):
        """
        Tests that the behavior for setting initial
        form data can be overridden in the ModelAdmin class.
        Usually, the initial value is set via the GET params.
        """
        response = self.client.get(
            reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
            {'name': 'test_value'}
        )
        # this would be the usual behaviour
        self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behaviour
        self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminJavaScriptTest(TestCase):
    """Tests for the JavaScript assets included by admin pages."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_js_minified_only_if_debug_is_false(self):
        """
        Ensure that the minified versions of the JS files are only used when
        DEBUG is False.
        Refs #17521.
        """
        # DEBUG=False: only the .min.js variants should be referenced.
        with override_settings(DEBUG=False):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertNotContains(response, 'vendor/jquery/jquery.js')
            self.assertContains(response, 'vendor/jquery/jquery.min.js')
            self.assertNotContains(response, 'prepopulate.js')
            self.assertContains(response, 'prepopulate.min.js')
            self.assertNotContains(response, 'actions.js')
            self.assertContains(response, 'actions.min.js')
            self.assertNotContains(response, 'collapse.js')
            self.assertContains(response, 'collapse.min.js')
            self.assertNotContains(response, 'inlines.js')
            self.assertContains(response, 'inlines.min.js')
        # DEBUG=True: only the unminified variants should be referenced.
        with override_settings(DEBUG=True):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertContains(response, 'vendor/jquery/jquery.js')
            self.assertNotContains(response, 'vendor/jquery/jquery.min.js')
            self.assertContains(response, 'prepopulate.js')
            self.assertNotContains(response, 'prepopulate.min.js')
            self.assertContains(response, 'actions.js')
            self.assertNotContains(response, 'actions.min.js')
            self.assertContains(response, 'collapse.js')
            self.assertNotContains(response, 'collapse.min.js')
            self.assertContains(response, 'inlines.js')
            self.assertNotContains(response, 'inlines.min.js')
@override_settings(ROOT_URLCONF='admin_views.urls')
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
    # A superuser to log in as and one person to exercise "save as new" on.
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
def setUp(self):
    # Every test runs authenticated as the superuser.
    self.client.force_login(self.superuser)
def test_save_as_duplication(self):
    """Ensure save as actually creates a new person"""
    post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
    response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
    # A new person exists and the original is untouched.
    self.assertEqual(len(Person.objects.filter(name='John M')), 1)
    self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
    # The redirect targets the newly created object's change page.
    new_person = Person.objects.latest('id')
    self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,)))
def test_save_as_continue_false(self):
    """
    Saving a new object using "Save as new" redirects to the changelist
    instead of the change view when ModelAdmin.save_as_continue=False.
    """
    post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
    # site2 registers PersonAdmin with save_as_continue=False.
    url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name)
    response = self.client.post(url, post_data)
    self.assertEqual(len(Person.objects.filter(name='John M')), 1)
    self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
    self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name))
def test_save_as_new_with_validation_errors(self):
"""
Ensure that when you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
'_saveasnew': '',
'gender': 'invalid',
'_addanother': 'fail',
})
self.assertContains(response, 'Please correct the errors below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': 'Child',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': '_invalid',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': '_invalid',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': 'Father',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF='admin_views.urls')
class CustomModelAdminTest(AdminViewBasicTestCase):
    """Checks that a custom AdminSite serves its overridden views and templates."""

    def _assert_custom_template(self, response, template, snippet):
        """Shared check: response is a TemplateResponse from *template* containing *snippet*."""
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, template)
        self.assertContains(response, snippet)

    def test_custom_admin_site_login_form(self):
        self.client.logout()
        index_response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(index_response, TemplateResponse)
        self.assertEqual(index_response.status_code, 200)
        login_response = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login_response, TemplateResponse)
        self.assertEqual(login_response.status_code, 200)
        # The custom login form contributes its own error text and media asset.
        self.assertContains(login_response, 'custom form error')
        self.assertContains(login_response, 'path/to/media.css')

    def test_custom_admin_site_login_template(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self._assert_custom_template(response, 'custom_admin/login.html', 'Hello from a custom login template')

    def test_custom_admin_site_logout_template(self):
        response = self.client.get(reverse('admin2:logout'))
        self._assert_custom_template(response, 'custom_admin/logout.html', 'Hello from a custom logout template')

    def test_custom_admin_site_index_view_and_template(self):
        response = self.client.get(reverse('admin2:index'))
        self._assert_custom_template(response, 'custom_admin/index.html', 'Hello from a custom index template *bar*')

    def test_custom_admin_site_app_index_view_and_template(self):
        response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self._assert_custom_template(response, 'custom_admin/app_index.html', 'Hello from a custom app_index template')

    def test_custom_admin_site_password_change_template(self):
        response = self.client.get(reverse('admin2:password_change'))
        self._assert_custom_template(
            response, 'custom_admin/password_change_form.html',
            'Hello from a custom password change form template',
        )

    def test_custom_admin_site_password_change_with_extra_context(self):
        response = self.client.get(reverse('admin2:password_change'))
        self._assert_custom_template(response, 'custom_admin/password_change_form.html', 'eggs')

    def test_custom_admin_site_password_change_done_template(self):
        response = self.client.get(reverse('admin2:password_change_done'))
        self._assert_custom_template(
            response, 'custom_admin/password_change_done.html',
            'Hello from a custom password change done template',
        )

    def test_custom_admin_site_view(self):
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        self.client.force_login(self.superuser)
        target_user = User.objects.get(username='super')
        response = self.client.get(reverse('admin4:auth_user_password_change', args=(target_user.pk,)))
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Return the permission object, for the Model"""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(codename=perm, content_type=content_type)
@override_settings(
ROOT_URLCONF='admin_views.urls',
# Test with the admin's documented list of required context processors.
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
    @classmethod
    def setUpTestData(cls):
        """Create the users, sample objects, and login POST payloads shared by every test."""
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        # Staff users who will each receive a single model permission (granted below).
        cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
        # Non-staff users: joepublic gets no permissions; nostaff gets a change
        # permission below despite not being staff.
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
        cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1,
            another_section=cls.s1,
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Setup permissions, for our users who can add, change, and delete.
        opts = Article._meta
        # User who can add Articles
        cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
        # User who can change Articles
        cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
        cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
        # User who can delete Articles
        cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
        cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))
        # login POST dicts
        cls.index_url = reverse('admin:index')
        cls.super_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super',
            'password': 'secret',
        }
        # Same superuser, identified by email address instead of username.
        cls.super_email_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': '[email protected]',
            'password': 'secret',
        }
        # Email login with a wrong password.
        cls.super_email_bad_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': '[email protected]',
            'password': 'notsecret',
        }
        cls.adduser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'adduser',
            'password': 'secret',
        }
        cls.changeuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'changeuser',
            'password': 'secret',
        }
        cls.deleteuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'deleteuser',
            'password': 'secret',
        }
        # This login targets the 'has_permission_admin' site rather than the default one.
        cls.nostaff_login = {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'nostaff',
            'password': 'secret',
        }
        cls.joepublic_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'joepublic',
            'password': 'secret',
        }
        # Deliberately omits 'username' to exercise the missing-field path.
        cls.no_username_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'password': 'secret',
        }
    def test_login(self):
        """
        Make sure only staff members can log in.
        Successful posts to the login page will redirect to the original url.
        Unsuccessful attempts will continue to render the login page with
        a 200 status code.
        """
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        # Super User
        response = self.client.get(self.index_url)
        self.assertRedirects(response, login_url)
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        # An empty context means the POST redirected instead of re-rendering
        # the login page with errors.
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Test if user enters email address
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # only correct passwords get a username hint
        login = self.client.post(login_url, self.super_email_bad_login)
        self.assertContains(login, ERROR_MESSAGE)
        new_user = User(username='jondoe', password='secret', email='[email protected]')
        new_user.save()
        # check to ensure if there are multiple email addresses a user doesn't get a 500
        login = self.client.post(login_url, self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # Add User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.adduser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Change User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.changeuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Delete User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.deleteuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
        # Regular User should not be able to login.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)
        # Requests without username should not return 500 errors.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.no_username_login)
        self.assertEqual(login.status_code, 200)
        # The form with its field-level error is exposed in the template context.
        form = login.context[0].get('form')
        self.assertEqual(form.errors['username'][0], 'This field is required.')
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse('admin:login'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index'))
    def test_login_has_permission(self):
        """Login checks against the 'has_permission_admin' site: joepublic is
        rejected with 'permission denied' while the non-staff 'nostaff' user
        and staff users are let in."""
        # Regular User should not be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, 'permission denied')
        # User with permissions should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        # Empty context => redirect happened, login page was not re-rendered.
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))
        # Staff should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'deleteuser',
            'password': 'secret',
        })
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '%s?%s' % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
    def test_double_login_is_not_allowed(self):
        """Regression test for #19327"""
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        # Empty context => redirect, not a re-rendered login page.
        self.assertFalse(login.context)
        # Logging in with non-admin user fails
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)
        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        # Logging in with admin user while already logged in
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = 'You are authenticated as {}'
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertNotContains(response, hint_template.format(''), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200)
    def test_add_view(self):
        """Test add view restricts access and actually adds items."""
        add_dict = {'title': 'Døm ikke',
                    'content': '<p>great article</p>',
                    'date_0': '2008-03-18', 'date_1': '10:54:39',
                    'section': self.s1.pk}
        # Change User should not have access to add articles
        self.client.force_login(self.changeuser)
        # make sure the view removes test cookie
        self.assertIs(self.client.session.test_cookie_worked(), False)
        response = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertEqual(response.status_code, 403)
        # Try POST just to make sure
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertEqual(post.status_code, 403)
        # Still only the three fixture articles.
        self.assertEqual(Article.objects.count(), 3)
        self.client.get(reverse('admin:logout'))
        # Add user may login and POST to add view, then redirect to admin root
        self.client.force_login(self.adduser)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
        self.assertNotContains(
            addpage, change_list_link,
            msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.'
        )
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.count(), 4)
        # Saving the article produced exactly one notification email.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
        self.client.get(reverse('admin:logout'))
        # Check that the addition was logged correctly
        addition_log = LogEntry.objects.all()[0]
        new_article = Article.objects.last()
        article_ct = ContentType.objects.get_for_model(Article)
        self.assertEqual(addition_log.user_id, self.adduser.pk)
        self.assertEqual(addition_log.content_type_id, article_ct.pk)
        self.assertEqual(addition_log.object_id, str(new_article.pk))
        self.assertEqual(addition_log.object_repr, "Døm ikke")
        self.assertEqual(addition_log.action_flag, ADDITION)
        self.assertEqual(addition_log.get_change_message(), "Added.")
        # Super can add too, but is redirected to the change list view
        self.client.force_login(self.superuser)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertContains(
            addpage, change_list_link,
            msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.'
        )
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
        self.assertEqual(Article.objects.count(), 5)
        self.client.get(reverse('admin:logout'))
        # 8509 - if a normal user is already logged in, it is possible
        # to change user into the superuser without error
        self.client.force_login(self.joepublicuser)
        # Check and make sure that if user expires, data still persists
        self.client.force_login(self.superuser)
        # make sure the view removes test cookie
        self.assertIs(self.client.session.test_cookie_worked(), False)
    def test_change_view(self):
        """Change view should restrict access and allow users to edit items."""
        change_dict = {'title': 'Ikke fordømt',
                       'content': '<p>edited article</p>',
                       'date_0': '2008-03-18', 'date_1': '10:54:39',
                       'section': self.s1.pk}
        article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
        article_changelist_url = reverse('admin:admin_views_article_changelist')
        # add user should not be able to view the list of article or change any of them
        self.client.force_login(self.adduser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 403)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 403)
        post = self.client.post(article_change_url, change_dict)
        self.assertEqual(post.status_code, 403)
        self.client.get(reverse('admin:logout'))
        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 200)
        post = self.client.post(article_change_url, change_dict)
        self.assertRedirects(post, article_changelist_url)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')
        # one error in form should produce singular error message, multiple errors plural
        change_dict['title'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the error below.',
            msg_prefix='Singular error message not found in response to post with one error'
        )
        # Blank out a second required field so two errors are present.
        change_dict['content'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the errors below.',
            msg_prefix='Plural error message not found in response to post with multiple errors'
        )
        self.client.get(reverse('admin:logout'))
        # Test redirection when using row-level change permissions. Refs #11513.
        # Per the assertions below, objects with odd ids are not editable and
        # objects with even ids are.
        r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
        r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
        change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
        change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
        for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
            self.client.force_login(login_user)
            response = self.client.get(change_url_1)
            self.assertEqual(response.status_code, 403)
            response = self.client.post(change_url_1, {'name': 'changed'})
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertEqual(response.status_code, 403)
            response = self.client.get(change_url_2)
            self.assertEqual(response.status_code, 200)
            response = self.client.post(change_url_2, {'name': 'changed'})
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertRedirects(response, self.index_url)
            self.client.get(reverse('admin:logout'))
        # Non-staff users are sent to the login page for either object.
        for login_user in [self.joepublicuser, self.nostaffuser]:
            self.client.force_login(login_user)
            response = self.client.get(change_url_1, follow=True)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertContains(response, 'login-form')
            response = self.client.get(change_url_2, follow=True)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertContains(response, 'login-form')
            self.client.get(reverse('admin:logout'))
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
'_saveasnew': 'Save as new',
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest('id')
self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,)))
    def test_delete_view(self):
        """Delete view should restrict access and actually delete items."""
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))
        # add user should not be able to delete articles
        self.client.force_login(self.adduser)
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 403)
        post = self.client.post(delete_url, delete_dict)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.count(), 3)
        self.client.logout()
        # Delete user can delete
        self.client.force_login(self.deleteuser)
        # Deleting the section lists all three related articles in the summary.
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertContains(response, "<h2>Summary</h2>")
        self.assertContains(response, "<li>Articles: 3</li>")
        # test response contains link to related Article
        self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
        response = self.client.get(delete_url)
        self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
        self.assertContains(response, "<h2>Summary</h2>")
        self.assertContains(response, "<li>Articles: 1</li>")
        self.assertEqual(response.status_code, 200)
        post = self.client.post(delete_url, delete_dict)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.count(), 2)
        # Deleting produced exactly one notification email.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
        # The deletion is recorded in the admin log.
        article_ct = ContentType.objects.get_for_model(Article)
        logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
        self.assertEqual(logged.object_id, str(self.a1.pk))
    def test_history_view(self):
        """History view should restrict access."""
        # add user should not be able to view the list of article or change any of them
        self.client.force_login(self.adduser)
        response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
        self.assertEqual(response.status_code, 403)
        self.client.get(reverse('admin:logout'))
        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
        self.assertEqual(response.status_code, 200)
        # Test redirection when using row-level change permissions. Refs #11513.
        # Per the assertions below: the first object's history is forbidden,
        # the second's is visible.
        rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
        rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
        for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
            self.client.force_login(login_user)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
            response = self.client.get(url)
            self.assertEqual(response.status_code, 403)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)
            self.client.get(reverse('admin:logout'))
        # Non-staff users are sent to the login page for either object.
        for login_user in [self.joepublicuser, self.nostaffuser]:
            self.client.force_login(login_user)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
            response = self.client.get(url, follow=True)
            self.assertContains(response, 'login-form')
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
            response = self.client.get(url, follow=True)
            self.assertContains(response, 'login-form')
            self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)))
self.assertEqual(response.status_code, 404)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse('admin:admin_views_article_add')
add_link_text = 'add_id_section'
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return response.context['adminform'].form.fields['section'].widget.can_change_related
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the "change section" link.
url = reverse('admin:admin_views_article_add')
change_link_text = 'change_id_section'
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the "change section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('change', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the "delete section" link.
url = reverse('admin:admin_views_article_add')
delete_link_text = 'delete_id_sub_section'
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the "delete section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('delete', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_index_fail_early(self):
"""
If a user has no module perms, avoid iterating over all the modeladmins
in the registry.
"""
opts = Article._meta
change_user = User.objects.get(username='changeuser')
permission = get_perm(Article, get_permission_codename('change', opts))
self.client.force_login(self.changeuser)
# the user has no module permissions, because this module doesn't exist
change_user.user_permissions.remove(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 403)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# Domain may depend on contrib.sites tests also run
six.assertRegex(self, response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
"""
Ensure that has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
def test_overriding_has_module_permission(self):
"""
Ensure that overriding has_module_permission() has the desired effect.
In this case, it always returns False, so the module should not be
displayed on the admin index page for any users.
"""
index_url = reverse('admin7:index')
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
    def test_post_save_message_no_forbidden_links_visible(self):
        """
        Post-save message shouldn't contain a link to the change form if the
        user doesn't have the change permission.
        """
        self.client.force_login(self.adduser)
        # Emulate Article creation for user with add-only permission.
        post_data = {
            "title": "Fun & games",
            "content": "Some content",
            "date_0": "2015-10-31",
            "date_1": "16:35:00",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True)
        # The success message must render the title as plain (escaped) text,
        # with no <a> link to the change form, since the user cannot change it.
        self.assertContains(
            response,
            '<li class="success">The article "Fun &amp; games" was added successfully.</li>',
            html=True
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333"""

    @classmethod
    def setUpTestData(cls):
        # A staff member who may only change Report objects.
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta)))

    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.force_login(self.changeuser)
        response = self.client.get(reverse('admin:index'))
        # A NoReverseMatch inside the index view would surface here as a 500.
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewDeletedObjectsTest(TestCase):
    """
    Tests for the admin delete-confirmation view: the page must list every
    related object that would be deleted alongside (or that blocks deletion),
    correctly nested and without duplicates.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.v1 = Villain.objects.create(name='Adam')
        cls.v2 = Villain.objects.create(name='Sue')
        cls.sv1 = SuperVillain.objects.create(name='Bob')
        cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
        cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
        cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
        cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
        cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
        cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
        # Each row references the other by raw id before that row exists;
        # this only works with deferred constraint checks (hence the class's
        # skipUnlessDBFeature decorator).
        cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
        cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
    def setUp(self):
        # All tests run as the superuser unless they log in someone else.
        self.client.force_login(self.superuser)
    def test_nesting(self):
        """
        Objects should be nested to display the relationships that
        cause them to be scheduled for deletion.
        """
        # response.content is bytes, so the pattern must be bytes as well.
        pattern = re.compile(
            force_bytes(
                r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
                r'<li>Plot details: <a href="%s">almost finished</a>' % (
                    reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
                    reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)),
                )
            )
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        six.assertRegex(self, response.content, pattern)
    def test_cyclic(self):
        """
        Cyclic relationships should still cause each object to only be
        listed once.
        """
        one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
            reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
        )
        two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
            reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
        )
        response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
        # The third argument (1) asserts each entry appears exactly once.
        self.assertContains(response, one, 1)
        self.assertContains(response, two, 1)
    def test_perms_needed(self):
        # deleteuser may delete Plots but not PlotDetails, so the page must
        # report the missing permission instead of offering the deletion.
        self.client.logout()
        delete_user = User.objects.get(username='deleteuser')
        delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta)))
        self.client.force_login(self.deleteuser)
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
        self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
        self.assertContains(response, "<li>plot details</li>")
    def test_protected(self):
        # Answers reference the Question with a protecting FK, so deletion is
        # blocked and both answers must be listed as protected objects.
        q = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q, answer="Because.")
        a2 = Answer.objects.create(question=q, answer="Yes.")
        response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
        )
    def test_post_delete_protected(self):
        """
        A POST request to delete protected objects should display the page
        which says the deletion is prohibited.
        """
        q = Question.objects.create(question='Why?')
        Answer.objects.create(question=q, answer='Because.')
        response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'})
        # The Question must still exist — the POST did not delete anything.
        self.assertEqual(Question.objects.count(), 1)
        self.assertContains(response, "would require deleting the following protected related objects")
    def test_not_registered(self):
        # SecretHideout is rendered as plain text (no change-form link).
        should_contain = """<li>Secret hideout: underground bunker"""
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain, 1)
    def test_multiple_fkeys_to_same_model(self):
        """
        If a deleted object has two relationships from another model,
        both of those should be followed in looking for related
        objects to delete.
        """
        # pl1 points at v1 via team_leader and at v2 via contact; deleting
        # either villain must therefore list the plot.
        should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl1.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain)
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain)
    def test_multiple_fkeys_to_same_instance(self):
        """
        If a deleted object has two relationships pointing to it from
        another object, the other object should still only be listed
        once.
        """
        # pl2 references v2 through both team_leader and contact.
        should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl2.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain, 1)
    def test_inheritance(self):
        """
        In the case of an inherited model, if either the child or
        parent-model instance is deleted, both instances are listed
        for deletion, as well as any relationships they have.
        """
        should_contain = [
            '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
            '<li>Super villain: <a href="%s">Bob</a>' % reverse(
                'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
            ),
            '<li>Secret hideout: floating castle',
            '<li>Super secret hideout: super floating castle!',
        ]
        # Deleting via the parent model's admin...
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)
        # ...and via the child model's admin lists the same objects.
        response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)
    def test_generic_relations(self):
        """
        If a deleted object has GenericForeignKeys pointing to it,
        those objects should be listed for deletion.
        """
        plot = self.pl3
        tag = FunkyTag.objects.create(content_object=plot, name='hott')
        should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
            'admin:admin_views_funkytag_change', args=(tag.id,))
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
        self.assertContains(response, should_contain)
    def test_generic_relations_with_related_query_name(self):
        """
        If a deleted object has GenericForeignKey with
        GenericRelation(related_query_name='...') pointing to it, those objects
        should be listed for deletion.
        """
        bookmark = Bookmark.objects.create(name='djangoproject')
        tag = FunkyTag.objects.create(content_object=bookmark, name='django')
        tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
        should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
        response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
        self.assertContains(response, should_contain)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestGenericRelations(TestCase):
    """Rendering of generic foreign keys in the admin changelist."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.v1 = Villain.objects.create(name='Adam')
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
    def setUp(self):
        # All tests in this class run as the superuser.
        self.client.force_login(self.superuser)
    def test_generic_content_object_in_list_display(self):
        # The changelist cell should show the str() of the related object.
        FunkyTag.objects.create(content_object=self.pl3, name='hott')
        response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
        self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewStringPrimaryKeyTest(TestCase):
    """
    Admin URLs and links must quote/escape string primary keys that contain
    spaces, slashes, quotes and other special characters.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # A primary key exercising roughly every character that needs URL
        # quoting or HTML escaping.
        cls.pk = (
            "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
            """-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
        )
        cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        user_pk = cls.superuser.pk
        # NOTE(review): action flag 2 — presumably CHANGE; confirm against
        # the LogEntry action-flag constants.
        LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')
    def setUp(self):
        # All tests in this class run as the superuser.
        self.client.force_login(self.superuser)
    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)
    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)
    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(quote(self.pk)))
        # reverse() with a placeholder then substitute, because the pk itself
        # cannot be passed through reverse() after quoting.
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', pk_final_url)
        should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)
    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
        response = self.client.get(url)
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
        should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
        add_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
        # A pk that is exactly "add" must not collide with the add view URL.
        add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
        add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
        change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
        self.assertNotEqual(add_url, change_url)
    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_shortcut_view_with_escaping(self):
        "'View on site should' work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)
    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))
    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.
        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted
@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """

    def test_secure_view_shows_login_if_not_logged_in(self):
        """
        Ensure that we see the admin login form.
        """
        secure_url = reverse('secure_view')
        login_url = reverse('admin:login')
        # The anonymous request redirects to the login view, carrying the
        # original URL in the "next" query parameter.
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?next=%s' % (login_url, secure_url))
        # Following the redirect lands on the admin login template, which
        # still remembers where to return after authentication.
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        Ensure that staff_member_required decorator works with an argument
        (redirect_field_name).
        """
        secure_url = '/test_admin/admin/secure-view2/'
        # The redirect must use the custom field name ("myfield") instead of
        # the default "next".
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewUnicodeTest(TestCase):
    """Admin change/delete views must handle non-ASCII model data."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.b1 = Book.objects.create(name='Lærdommer')
        cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
        cls.chap1 = Chapter.objects.create(
            title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
            book=cls.b1
        )
        cls.chap2 = Chapter.objects.create(
            title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
        cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
        cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
        cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
        cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
        cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
        cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_unicode_edit(self):
        """
        A test to ensure that POST on edit_view handles non-ASCII characters.
        """
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": self.chap1.pk,
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": self.chap2.id,
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": self.chap3.id,
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
        }
        # Rows 3-5 are the extra, untouched inline forms: they post empty values.
        for extra_row in range(3, 6):
            for field_name in ('id', 'title', 'content'):
                post_data["chapter_set-%d-%s" % (extra_row, field_name)] = ""
        response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere

    def test_unicode_delete(self):
        """
        Ensure that the delete_view handles non-ASCII characters
        """
        confirmation = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
        # GET shows the confirmation page; POST performs the deletion.
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(delete_url, confirmation)
        self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewListEditable(TestCase):
    @classmethod
    def setUpTestData(cls):
        # Fixtures shared by all list_editable tests: a superuser to act as,
        # a section with three articles, a pre-populated post, and three
        # Person rows whose gender/alive fields get edited via the changelist.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        # Every test in this class runs as the superuser.
        self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
    def test_changelist_input_html(self):
        response = self.client.get(reverse('admin:admin_views_person_changelist'))
        # 2 inputs per object(the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF field = 1
        # field to track 'select all' across paginated views = 1
        # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
        self.assertContains(response, "<input", count=19)
        # 1 select per object (3) + 1 more = 4 selects
        # NOTE(review): the 4th is presumably the action-chooser <select>;
        # confirm against the changelist template.
        self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": "%s" % self.per3.pk,
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
''' Ensure that non field errors are displayed for each of the
forms in the changelist's formset. Refs #13126.
'''
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
1,
html=True
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
2,
html=True
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# Check that the order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
    """
    Ensure that pagination works for list_editable items.
    Refs #16819.
    """
    for pk in (1, 2, 3):
        UnorderedObject.objects.create(id=pk, name='Unordered object #%d' % pk)
    changelist_url = reverse('admin:admin_views_unorderedobject_changelist')
    # First page: the two most recently listed objects only.
    response = self.client.get(changelist_url)
    self.assertContains(response, 'Unordered object #3')
    self.assertContains(response, 'Unordered object #2')
    self.assertNotContains(response, 'Unordered object #1')
    # Second page: only the remaining object.
    response = self.client.get(changelist_url + '?p=1')
    self.assertNotContains(response, 'Unordered object #3')
    self.assertNotContains(response, 'Unordered object #2')
    self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
    """
    List editable changes should not be executed if the action "Go" button
    is used to submit the form.
    """
    post_data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        # "index" marks this POST as an action "Go" submission.
        "index": "0",
        "_selected_action": ['3'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), post_data)
    # The pending list_editable edits must NOT have been applied.
    self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
    """
    List editable changes should be executed if the "Save" button is used
    to submit the form - any action choices should be ignored.
    """
    post_data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "%s" % self.per1.pk,
        "form-1-gender": "2",
        "form-1-id": "%s" % self.per2.pk,
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "%s" % self.per3.pk,
        "_save": "Save",
        "_selected_action": ['1'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), post_data)
    # The list_editable edits were applied; the delete action was ignored.
    self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
    """
    Fields should not be list-editable in popups.
    """
    changelist_url = reverse('admin:admin_views_person_changelist')
    # Normal request: list_editable is in effect.
    response = self.client.get(changelist_url)
    self.assertNotEqual(response.context['cl'].list_editable, ())
    # Popup request: list_editable is disabled.
    response = self.client.get('%s?%s' % (changelist_url, IS_POPUP_VAR))
    self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
    """ Ensure that hidden pk fields aren't displayed in the table body and
    that their corresponding human-readable value is displayed instead.
    Note that the hidden pk fields are in fact displayed, but
    separately (not in the table), and only once.
    Refs #12475.
    """
    story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
    story2 = Story.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    response = self.client.get(reverse('admin:admin_views_story_changelist'))
    # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    # All hidden pk inputs are grouped in a single "hiddenfields" div;
    # story2 comes first, matching the order the changelist renders rows.
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (story2.id, story1.id),
        html=True
    )
    # The human-readable pk value appears exactly once per row in the table body.
    self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
    self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
    """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are
    referenced in list_display_links.
    Refs #12475.
    """
    story1 = OtherStory.objects.create(
        title='The adventures of Guido',
        content='Once upon a time in Djangoland...',
    )
    story2 = OtherStory.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
    link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
    response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
    # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    # All hidden pk inputs are grouped in a single "hiddenfields" div;
    # story2 comes first, matching the order the changelist renders rows.
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (story2.id, story1.id),
        html=True
    )
    # Because the pk is in list_display_links, it renders as a linked header
    # cell (<th>) instead of a plain table cell, exactly once per row.
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSearchTest(TestCase):
    """Tests for the admin changelist search box (``search_fields``)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        # Recommender/Recommendation pairs whose TitleTranslations are the
        # searchable text used by the sibling-model search tests below.
        cls.t1 = Recommender.objects.create()
        cls.t2 = Recommendation.objects.create(recommender=cls.t1)
        cls.t3 = Recommender.objects.create()
        cls.t4 = Recommendation.objects.create(recommender=cls.t3)
        cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
        cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
        cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
        cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_search_on_sibling_models(self):
        "A search can reference fields on sibling (related) models."
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")

    def test_with_fk_to_field(self):
        """
        Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        # The to_field parameter is carried through as a hidden input.
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)

    def test_exact_matches(self):
        # 'bar' matches the 'Bar' translation exactly...
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        # ...but a prefix of it does not (exact-match search field).
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")

    def test_beginning_matches(self):
        # 'Gui' matches the beginning of 'Guido van Rossum'...
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        # ...but a mid-word fragment does not (startswith search field).
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")

    def test_pluggable_search(self):
        # A ModelAdmin with a custom get_search_results() can match on
        # either name or age.
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")

    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True
        )

    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # (no query for the total count when show_full_result_count is False)
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True
        )
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInheritedInlinesTest(TestCase):
    """Tests for inlines whose models share a common (multi-table) parent."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_inline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # Pattern used to collect every form-field name in the rendered page.
        name_re = re.compile(b'name="(.*?)"')

        # test the add case: render the empty add form first
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        # test the add case: POST one FooAccount and one BarAccount inline
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)

        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id

        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        # Edit both inline usernames via the change form.
        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminActionsTest(TestCase):
    """Tests for admin changelist actions (the "Action:" dropdown and the
    default delete_selected action)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='[email protected]')
        cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')

    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # 'post': 'yes' confirms the deletion on the confirmation page.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        # First POST renders the confirmation page instead of deleting.
        confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, "<h2>Summary</h2>")
        self.assertContains(confirmation, "<li>Subscribers: 2</li>")
        self.assertContains(confirmation, "<li>External subscribers: 1</li>")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        # Second, confirmed POST performs the deletion.
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        subscriber = Subscriber.objects.get(id=1)
        subscriber.id = 9999
        subscriber.save()
        action_data = {
            ACTION_CHECKBOX_NAME: [9999, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
        self.assertContains(response, 'value="9999"')  # Instead of 9,999
        self.assertContains(response, 'value="2"')

    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")
        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        delete_confirmation_data = action_data.copy()
        delete_confirmation_data['post'] = 'yes'
        response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data)
        # The page lists each protected related object with a change link.
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)),
            html=True
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)),
            html=True
        )
        # A POST request to delete protected objects should display the page
        # which says the deletion is prohibited.
        response = self.client.post(reverse('admin:admin_views_question_changelist'), delete_confirmation_data)
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertEqual(Question.objects.count(), 2)

    def test_model_admin_default_delete_action_no_change_url(self):
        """
        Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
        Regression test for #20640
        """
        obj = UnchangeableObject.objects.create()
        action_data = {
            ACTION_CHECKBOX_NAME: obj.pk,
            "action": "delete_selected",
            "index": "0",
        }
        response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data)
        # No 500 caused by NoReverseMatch
        self.assertEqual(response.status_code, 200)
        # The page shouldn't display a link to the nonexistent change page
        self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)

    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 302)

    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1'
        response = self.client.post(url, action_data)
        self.assertRedirects(response, url)

    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)

    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')

    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist'))
        self.assertContains(response, '''<label>Action: <select name="action" required>
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)

    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertIsNone(response.context["action_form"])
        # Fixed typo ("checkboxbox") in the failure message and dropped a
        # duplicate assertNotContains with identical arguments.
        self.assertNotContains(
            response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkbox in response"
        )

    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertIsNone(response.context["action_form"])
        self.assertContains(
            response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions"
        )

    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertIsNotNone(response.context["action_form"])
        self.assertContains(response, 'action-checkbox-column')

    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertContains(response, '0 of 2 selected')

    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertIsNotNone(response.context["action_form"])
        response = self.client.get(
            reverse('admin:admin_views_subscriber_changelist') + '?%s' % IS_POPUP_VAR)
        self.assertIsNone(response.context["action_form"])

    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')

    def test_popup_template_escaping(self):
        # Backslashes in the popup payload must survive template rendering
        # (they are embedded as JSON inside the page).
        popup_response_data = json.dumps({
            'new_value': 'new_value\\',
            'obj': 'obj\\',
            'value': 'value\\',
        })
        context = {
            'popup_response_data': popup_response_data,
        }
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            r'"value\\"', output
        )
        self.assertIn(
            r'"new_value\\"', output
        )
        self.assertIn(
            r'"obj\\"', output
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestCustomChangeList(TestCase):
    """A ModelAdmin may provide its own ChangeList subclass."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        # Insert some data
        response = self.client.post(reverse('admin:admin_views_gadget_add'), {"name": "First Gadget"})
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # Hit the page once to get messages out of the queue message list
        self.client.get(changelist_url)
        # Ensure that data is still not visible on the page
        response = self.client.get(changelist_url)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'First Gadget')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestInlineNotEditable(TestCase):
    """Smoke test: the add form of a parent with inlines must render."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_GET_parent_add(self):
        """
        InlineModelAdmin broken?
        """
        parent_add_url = reverse('admin:admin_views_parent_add')
        self.assertEqual(self.client.get(parent_add_url).status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
    # Shared superuser; each test logs it in via setUp().
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
    self.client.force_login(self.superuser)
    # Create three EmptyModel rows up front; tests check which are visible.
    self.pks = [EmptyModel.objects.create().id for _ in range(3)]
    self.super_login = {
        REDIRECT_FIELD_NAME: reverse('admin:index'),
        'username': 'super',
        'password': 'secret',
    }
def test_changelist_view(self):
    """Only objects the custom queryset exposes (pk > 1) appear in the list."""
    response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
    for pk in self.pks:
        row_text = 'Primary key = %s' % pk
        if pk > 1:
            self.assertContains(response, row_text)
        else:
            self.assertNotContains(response, row_text)
def test_changelist_view_count_queries(self):
    """The changelist issues a fixed number of queries regardless of filtering."""
    # create 2 Person objects
    Person.objects.create(name='person1', gender=1)
    Person.objects.create(name='person2', gender=2)
    changelist_url = reverse('admin:admin_views_person_changelist')
    # 5 queries are expected: 1 for the session, 1 for the user,
    # 2 for the counts and 1 for the objects on the page
    cases = [
        (None, '0 of 2 selected', 'All 2 selected'),
        ({'q': 'not_in_name'}, '0 of 0 selected', 'All 0 selected'),
        ({'q': 'person'}, '0 of 2 selected', 'All 2 selected'),
        ({'gender__exact': '1'}, '0 of 1 selected', '1 selected'),
    ]
    for extra, expected_note, expected_note_all in cases:
        with self.assertNumQueries(5):
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], expected_note)
            self.assertEqual(resp.context['selection_note_all'], expected_note_all)
def test_change_view(self):
    """Change pages exist only for objects the custom queryset exposes (pk > 1)."""
    for pk in self.pks:
        response = self.client.get(reverse('admin:admin_views_emptymodel_change', args=(pk,)))
        expected_status = 200 if pk > 1 else 404
        self.assertEqual(response.status_code, expected_status)
def test_add_model_modeladmin_defer_qs(self):
    """Adding works when the ModelAdmin's queryset uses defer() (#14529)."""
    # Test for #14529. defer() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    self.assertEqual(CoverLetter.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "author": "Candidate, Best",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = CoverLetter.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The cover letter "<a href="%s">'
        'Candidate, Best</a>" was added successfully.</li>' %
        reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True
    )

    # model has no __str__ method: the message shows the default
    # "ShortMessage object" representation instead.
    self.assertEqual(ShortMessage.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "content": "What's this SMS thing?",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = ShortMessage.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The short message "<a href="%s">'
        'ShortMessage object</a>" was added successfully.</li>' %
        reverse('admin:admin_views_shortmessage_change', args=(pk,)), html=True
    )
def test_add_model_modeladmin_only_qs(self):
    """Adding works when the ModelAdmin's queryset uses only() (#14529)."""
    # Test for #14529. only() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    self.assertEqual(Telegram.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "Urgent telegram",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Telegram.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = Telegram.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The telegram "<a href="%s">'
        'Urgent telegram</a>" was added successfully.</li>' %
        reverse('admin:admin_views_telegram_change', args=(pk,)), html=True
    )

    # model has no __str__ method: the message shows the default
    # "Paper object" representation instead.
    self.assertEqual(Paper.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "My Modified Paper Title",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = Paper.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The paper "<a href="%s">'
        'Paper object</a>" was added successfully.</li>' %
        reverse('admin:admin_views_paper_change', args=(pk,)), html=True
    )
def test_edit_model_modeladmin_defer_qs(self):
    """Editing works when the ModelAdmin's queryset uses defer() (#14529)."""
    # Test for #14529. defer() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    cl = CoverLetter.objects.create(author="John Doe")
    self.assertEqual(CoverLetter.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "author": "John Doe II",
        "_save": "Save",
    }
    url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,))
    response = self.client.post(url, post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name. Instance
    # representation is set by model's __str__()
    self.assertContains(
        response,
        '<li class="success">The cover letter "<a href="%s">'
        'John Doe II</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True
    )

    # model has no __str__ method
    sm = ShortMessage.objects.create(content="This is expensive")
    self.assertEqual(ShortMessage.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "content": "Too expensive",
        "_save": "Save",
    }
    url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))
    response = self.client.post(url, post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The ugly(!)
    # instance representation is set by six.text_type()
    self.assertContains(
        response,
        '<li class="success">The short message "<a href="%s">'
        'ShortMessage object</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), html=True
    )
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="Frist Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram "<a href="%s">'
'Telegram without typo</a>" was changed successfully.</li>' %
reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by six.text_type()
self.assertContains(
response,
'<li class="success">The paper "<a href="%s">'
'Paper object</a>" was changed successfully.</li>' %
reverse('admin:admin_views_paper_change', args=(p.pk,)), html=True
)
    def test_history_view_custom_qs(self):
        """
        Ensure that custom querysets are considered for the admin history view.
        Refs #21013.
        """
        # NOTE(review): self.super_login is defined on the enclosing class
        # (outside this chunk) — presumably a POST payload of superuser
        # credentials; confirm against the class definition.
        self.client.post(reverse('admin:login'), self.super_login)
        FilteredManager.objects.create(pk=1)
        FilteredManager.objects.create(pk=2)
        # Both objects appear on the changelist...
        response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
        self.assertContains(response, "PK=1")
        self.assertContains(response, "PK=2")
        # ...and their history views are reachable (not 404) even though the
        # ModelAdmin uses a custom queryset.
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200
        )
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineFileUploadTest(TestCase):
    """
    Regression tests for file-upload fields rendered inside admin inline
    formsets.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)
        # Set up test Picture and Gallery.
        # These must be set up here instead of in fixtures in order to allow Picture
        # to use a NamedTemporaryFile.
        file1 = tempfile.NamedTemporaryFile(suffix=".file1")
        file1.write(b'a' * (2 ** 21))  # 2 MiB of filler data
        filename = file1.name
        file1.close()
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
        self.picture.save()

    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Test that inline file uploads correctly display prior data (#10002).
        """
        # The second (blank-named) inline form triggers a validation error,
        # so the page is re-rendered; the existing picture's file widget must
        # still show its current value.
        post_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        response = self.client.post(
            reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
        )
        # "Currently" is the label the ClearableFileInput renders next to an
        # existing file.
        self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineTests(TestCase):
    """
    Saving inline formsets through the admin change view works for models
    with a variety of primary-key configurations (auto, explicit auto,
    character, integer, inherited) and with editable ordering fields.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        # Baseline management-form data for every inline formset on the
        # Collector change page; each test fills in only the fields it
        # exercises and posts the whole payload.
        self.post_data = {
            "name": "Test Name",
            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",
            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",
            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",
            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",
            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",
            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        self.client.force_login(self.superuser)
        # pk=1 matches the hard-coded "1" owner/collector values above.
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)  # successful save redirects
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')
        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')
        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')
        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')
        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk
        # Check that the PK link exists on the rendered form. For a
        # multi-table-inherited model the inline PK is the parent-link field.
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)
        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",
            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",
            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",
            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",
            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",
            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",
            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",
            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF='admin_views.urls')
class NeverCacheTests(TestCase):
    """
    Admin views must not be cached: their responses carry max-age=0
    (get_max_age() == 0), while auxiliary views without the never-cache
    decorator carry no max-age at all (get_max_age() is None).
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = Section.objects.create(name='Test section')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_admin_index(self):
        "Check the never-cache status of the main index"
        response = self.client.get(reverse('admin:index'))
        self.assertEqual(get_max_age(response), 0)

    def test_app_index(self):
        "Check the never-cache status of an application index"
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertEqual(get_max_age(response), 0)

    def test_model_index(self):
        "Check the never-cache status of a model index"
        response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        self.assertEqual(get_max_age(response), 0)

    def test_model_add(self):
        "Check the never-cache status of a model add page"
        response = self.client.get(reverse('admin:admin_views_fabric_add'))
        self.assertEqual(get_max_age(response), 0)

    def test_model_view(self):
        "Check the never-cache status of a model edit page"
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        self.assertEqual(get_max_age(response), 0)

    def test_model_history(self):
        "Check the never-cache status of a model history page"
        response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,)))
        self.assertEqual(get_max_age(response), 0)

    def test_model_delete(self):
        "Check the never-cache status of a model delete page"
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertEqual(get_max_age(response), 0)

    def test_login(self):
        "Check the never-cache status of login views"
        # Logged out, the index URL renders the login view instead.
        self.client.logout()
        response = self.client.get(reverse('admin:index'))
        self.assertEqual(get_max_age(response), 0)

    def test_logout(self):
        "Check the never-cache status of logout view"
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(get_max_age(response), 0)

    def test_password_change(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        response = self.client.get(reverse('admin:password_change'))
        self.assertIsNone(get_max_age(response))

    def test_password_change_done(self):
        "Check the never-cache status of the password change done view"
        response = self.client.get(reverse('admin:password_change_done'))
        self.assertIsNone(get_max_age(response))

    def test_JS_i18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        response = self.client.get(reverse('admin:jsi18n'))
        self.assertIsNone(get_max_age(response))
@override_settings(ROOT_URLCONF='admin_views.urls')
class PrePopulatedTest(TestCase):
    """
    The prepopulated-fields JavaScript options are emitted on the add page
    and suppressed on the change page once the slug has a value.

    The expected strings use HTML entities (&quot;) because the admin
    renders the JSON options HTML-escaped into the template. The previous
    literals had the entities unescaped into raw double quotes, which made
    them syntactically invalid Python.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_prepopulated_on(self):
        # The add page wires up the slug widgets (main form and inline).
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;")

    def test_prepopulated_off(self):
        # On the change page of an object whose slug is already set, the
        # prepopulation JavaScript must not be emitted.
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
        self.assertContains(response, "A Long Title")
        self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertNotContains(
            response,
            "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;"
        )

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(response, "&quot;maxLength&quot;: 1000")  # instead of 1,000
@override_settings(ROOT_URLCONF='admin_views.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps
    def setUp(self):
        # Fixtures are created per test (not in setUpTestData) since these
        # Selenium tests run against a live server.
        self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def test_prepopulated_fields(self):
        """
        Ensure that the JavaScript-automated prepopulated fields work with the
        main form and with stacked and tabular inlines.
        Refs #13068, #9264, #9983, #9784.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add'))
        # Main form ----------------------------------------------------------
        self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18')
        self.get_select_option('#id_status', 'option two').click()
        # Mixed-script name exercises slugify()'s unicode handling.
        self.selenium.find_element_by_id('id_name').send_keys(' this is the mAin nÀMë and it\'s awεšomeııı')
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value')
        self.assertEqual(slug1, 'main-name-and-its-awesomeiii-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-and-its-awesomeiii')
        # slug3 preserves the unicode characters (allow_unicode slug).
        self.assertEqual(slug3, 'main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131')
        # Stacked inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
        self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys(
            ' here is a sŤāÇkeð inline ! '
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
        self.assertEqual(slug2, 'option-one-here-stacked-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
        self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys(
            ' now you haVe anöther sŤāÇkeð inline with a very ... '
            'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value')
        # 50 characters maximum for slug1 field
        self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')
        # 60 characters maximum for slug2 field
        self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')
        # Tabular inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
        self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys(
            'And now, with a tÃbűlaŘ inline !!!'
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
        self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
        self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys(
            'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
        self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
        # Save and check that everything is properly stored in the database
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.assertEqual(MainPrepopulated.objects.all().count(), 1)
        # get() raises DoesNotExist if the slugs were stored incorrectly.
        MainPrepopulated.objects.get(
            name=' this is the mAin nÀMë and it\'s awεšomeııı',
            pubdate='2012-02-18',
            status='option two',
            slug1='main-name-and-its-awesomeiii-2012-02-18',
            slug2='option-two-main-name-and-its-awesomeiii',
        )
        self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
        RelatedPrepopulated.objects.get(
            name=' here is a sŤāÇkeð inline ! ',
            pubdate='2011-12-17',
            status='option one',
            slug1='here-stacked-inline-2011-12-17',
            slug2='option-one-here-stacked-inline',
        )
        RelatedPrepopulated.objects.get(
            # 75 characters in name field
            name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo',
            pubdate='1999-01-25',
            status='option two',
            slug1='now-you-have-another-stacked-inline-very-loooooooo',
            slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
        )
        RelatedPrepopulated.objects.get(
            name='And now, with a tÃbűlaŘ inline !!!',
            pubdate='1234-12-07',
            status='option two',
            slug1='and-now-tabular-inline-1234-12-07',
            slug2='option-two-and-now-tabular-inline',
        )
        RelatedPrepopulated.objects.get(
            name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
            pubdate='1981-08-22',
            status='option one',
            slug1='tabular-inline-ignored-characters-1981-08-22',
            slug2='option-one-tabular-inline-ignored-characters',
        )
    def test_populate_existing_object(self):
        """
        Ensure that the prepopulation works for existing objects too, as long
        as the original field is empty.
        Refs #19082.
        """
        # Slugs are empty to start with.
        item = MainPrepopulated.objects.create(
            name=' this is the mAin nÀMë',
            pubdate='2012-02-18',
            status='option two',
            slug1='',
            slug2='',
        )
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))
        self.selenium.get(object_url)
        self.selenium.find_element_by_id('id_name').send_keys(' the best')
        # The slugs got prepopulated since they were originally empty
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')
        # Save the object
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        # Re-open the object: the slugs are now non-empty, so further name
        # edits must not overwrite them.
        self.selenium.get(object_url)
        self.selenium.find_element_by_id('id_name').send_keys(' hello')
        # The slugs didn't change since they were no longer empty
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')
    def test_collapsible_fieldset(self):
        """
        Test that the 'collapse' class in fieldsets definition allows to
        show/hide the appropriate field section.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add'))
        # The collapsed fieldset's fields start hidden.
        self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
        # Clicking the "Show" toggle reveals them and flips the link to "Hide".
        self.selenium.find_elements_by_link_text('Show')[0].click()
        self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
        self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide")
    def test_first_field_focus(self):
        """JavaScript-assisted auto-focus on first usable form field."""
        # First form field has a single widget
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add'))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_name')
        )
        # First form field has a MultiWidget: focus lands on the widget's
        # first subwidget (suffix _0).
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add'))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_start_date_0')
        )
    def test_cancel_delete_confirmation(self):
        "Cancelling the deletion of an object takes the user back one page."
        pizza = Pizza.objects.create(name="Double Cheese")
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = self.live_server_url + url
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        # Click 'cancel' on the delete page.
        self.selenium.find_element_by_class_name('cancel-link').click()
        # Wait until we're back on the change page.
        self.wait_for_text('#content h1', 'Change pizza')
        self.assertEqual(self.selenium.current_url, full_url)
        # Nothing was deleted.
        self.assertEqual(Pizza.objects.count(), 1)
    def test_cancel_delete_related_confirmation(self):
        """
        Cancelling the deletion of an object with relations takes the user back
        one page.
        """
        pizza = Pizza.objects.create(name="Double Cheese")
        topping1 = Topping.objects.create(name="Cheddar")
        topping2 = Topping.objects.create(name="Mozzarella")
        pizza.toppings.add(topping1, topping2)
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = self.live_server_url + url
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        # Click 'cancel' on the delete page.
        self.selenium.find_element_by_class_name('cancel-link').click()
        # Wait until we're back on the change page.
        self.wait_for_text('#content h1', 'Change pizza')
        self.assertEqual(self.selenium.current_url, full_url)
        # Neither the object nor its related objects were deleted.
        self.assertEqual(Pizza.objects.count(), 1)
        self.assertEqual(Topping.objects.count(), 2)
    def test_list_editable_popups(self):
        """
        list_editable foreign keys have add/change popups.
        """
        from selenium.webdriver.support.ui import Select
        s1 = Section.objects.create(name='Test section')
        Article.objects.create(
            title='foo',
            content='<p>Middle content</p>',
            date=datetime.datetime(2008, 3, 18, 11, 54, 58),
            section=s1,
        )
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))
        # Change popup
        self.selenium.find_element_by_id('change_id_form-0-section').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.wait_for_text('#content h1', 'Change section')
        name_input = self.selenium.find_element_by_id('id_name')
        name_input.clear()
        # Markup in the name verifies the select option text is not
        # interpreted as HTML.
        name_input.send_keys('<i>edited section</i>')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The changelist select reflects the edited name.
        select = Select(self.selenium.find_element_by_id('id_form-0-section'))
        self.assertEqual(select.first_selected_option.text, '<i>edited section</i>')
        # Add popup
        self.selenium.find_element_by_id('add_id_form-0-section').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.wait_for_text('#content h1', 'Add section')
        self.selenium.find_element_by_id('id_name').send_keys('new section')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The newly added section is selected in the changelist.
        select = Select(self.selenium.find_element_by_id('id_form-0-section'))
        self.assertEqual(select.first_selected_option.text, 'new section')
    def test_inline_uuid_pk_edit_with_popup(self):
        """Saving a UUID-pk related object via a change popup keeps it selected."""
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        # Open the change popup for the parent FK and save it unchanged.
        self.selenium.find_element_by_id('change_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The parent's UUID must survive as both label and value of the select.
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(select.first_selected_option.text, str(parent.id))
        self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))
    def test_inline_uuid_pk_add_with_popup(self):
        """Adding a UUID-pk related object via an add popup selects it."""
        from selenium.webdriver.support.ui import Select
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
        # Open the add popup for the parent FK and create a new parent.
        self.selenium.find_element_by_id('add_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_id('id_title').send_keys('test')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The freshly created parent's UUID is selected in the widget.
        select = Select(self.selenium.find_element_by_id('id_parent'))
        uuid_id = str(ParentWithUUIDPK.objects.first().id)
        self.assertEqual(select.first_selected_option.text, uuid_id)
        self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)
    def test_inline_uuid_pk_delete_with_popup(self):
        """Deleting a UUID-pk related object via popup empties the select."""
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        # Open the delete popup for the parent FK and confirm the deletion.
        self.selenium.find_element_by_id('delete_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_xpath('//input[@value="Yes, I\'m sure"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The parent is gone and the widget falls back to the empty choice.
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
        self.assertEqual(select.first_selected_option.text, '---------')
        self.assertEqual(select.first_selected_option.get_attribute('value'), '')
    def test_list_editable_raw_id_fields(self):
        """A raw_id_fields lookup popup fills in the list_editable input."""
        parent = ParentWithUUIDPK.objects.create(title='test')
        parent2 = ParentWithUUIDPK.objects.create(title='test2')
        RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
        self.selenium.get(self.live_server_url + change_url)
        # Open the lookup popup for the first row's parent field.
        self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        # Select "parent2" in the popup.
        self.selenium.find_element_by_link_text(str(parent2.pk)).click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The newly selected pk should appear in the raw id input.
        value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
        self.assertEqual(value, str(parent2.pk))
@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
    """Rendering of ModelAdmin.readonly_fields on add/change pages."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_get(self):
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertEqual(response.status_code, 200)
        # Readonly fields render as plain text, not as form inputs.
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=15)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response, "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Multiline text in a readonly field gets <br /> tags.
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
        self.assertContains(response, "InlineMultiline<br />test<br />string")
        # Remove only this last line when the deprecation completes.
        self.assertContains(response, "<p>Multiline<br />html<br />content<br />with allow tags</p>", html=True)
        self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(
            response,
            '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        self.assertContains(response, "%d amount of cool" % p.pk)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_text_field(self):
        p = Post.objects.create(
            title="Readonly test", content="test",
            readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
        )
        Link.objects.create(
            url="http://www.djangoproject.com", post=p,
            readonly_link_content="test\r\nlink",
        )
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        # Checking readonly field.
        self.assertContains(response, 'test<br /><br />test<br /><br />test<br /><br />test')
        # Checking readonly field in inline.
        self.assertContains(response, 'test<br />link')

    def test_readonly_post(self):
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())
        # POSTing a value for the readonly field must not override it.
        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get(reverse('admin:admin_views_pizza_add'))
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_manytomany_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get(reverse('admin:admin_views_topping_add'))
        self.assertEqual(response.status_code, 200)

    def test_readonly_onetoone_backwards_ref(self):
        """
        Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
        """
        v1 = Villain.objects.create(name='Adam')
        pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
        pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_readonly_field(response, 'plotdetails')
        self.assertEqual(field.contents(), 'Brand New Plot')
        # The reverse relation also works if the OneToOneField is null.
        pd.plot = None
        pd.save()
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_readonly_field(response, 'plotdetails')
        self.assertEqual(field.contents(), '-')  # default empty value

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_field_overrides(self):
        """
        Regression test for #22087 - ModelForm Meta overrides are ignored by
        AdminReadonlyField
        """
        p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
        response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
        self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
        self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
        self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")

    def test_correct_autoescaping(self):
        """
        Make sure that non-field readonly elements are properly autoescaped (#24461)
        """
        section = Section.objects.create(name='<a>evil</a>')
        response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
        # The raw markup must never reach the page unescaped...
        self.assertNotContains(response, "<a>evil</a>", status_code=200)
        # ...the HTML-escaped entity form must be rendered instead. (As
        # previously written, the same literal was asserted both absent and
        # present, which could never pass — the second assertion lost its
        # entity escaping.)
        self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class LimitChoicesToInAdminTest(TestCase):
    """limit_choices_to callables restrict the options an admin form offers."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        tomorrow = datetime.datetime.today() + datetime.timedelta(days=1)
        yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
        allowed = Character.objects.create(username='threepwood', last_action=tomorrow)
        filtered_out = Character.objects.create(username='marley', last_action=yesterday)
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The allowed option should appear twice; the limited option should not appear.
        self.assertContains(response, allowed.username, count=2)
        self.assertNotContains(response, filtered_out.username)
@override_settings(ROOT_URLCONF='admin_views.urls')
class RawIdFieldsTest(TestCase):
    """raw_id_fields lookup popups honor the field's limit_choices_to."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
        self.assertTrue(m)  # Got a match
        # The href is HTML-escaped in the page source; unescape the
        # ampersands before requesting the URL. (The previous
        # .replace("&", "&") was a no-op — the search string lost its
        # "amp;" to entity mangling.)
        popup_url = m.group(1).decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape the HTML-escaped ampersands in the href (see above).
        popup_url = m.group(1).decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape the HTML-escaped ampersands in the href (see above).
        popup_url = m.group(1).decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')
@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        # Every test runs as the superuser.
        self.client.force_login(self.superuser)
    def test_save_button(self):
        """Plain save creates the user and redirects to its change page."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_save_continue_editing_button(self):
        """'Save and continue' also redirects to the new user's change page."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_password_mismatch(self):
        """Mismatched passwords attach the error to password2, not password."""
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        self.assertNotIn('password', adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'], ["The two password fields didn't match."])
    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        # The popup form hides the extra submit buttons.
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        self.assertContains(response, '"obj": "newuser"')
    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        # The popup form hides the extra submit buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"obj": "newuser"')
        self.assertContains(response, '"action": "change"')
    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"action": "delete"')
    def test_save_add_another_button(self):
        """'Save and add another' redirects back to the add form."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_user_permission_performance(self):
        """The user change page stays within a fixed query budget."""
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)
    def test_form_url_present_in_context(self):
        # admin3 is expected to configure a custom form_url ('pony') for the
        # password-change view — TODO confirm against the admin3 site setup.
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(ROOT_URLCONF='admin_views.urls')
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_save_button(self):
        """Saving a new group redirects to the changelist and adds the group."""
        group_count = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {
            'name': 'newgroup',
        })
        # (A leftover bare `Group.objects.order_by('-id')[0]` expression whose
        # result was discarded has been removed; the count assertion below
        # already verifies the group was created.)
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(Group.objects.count(), group_count + 1)

    def test_group_permission_performance(self):
        """The group change page stays within a fixed query budget."""
        g = Group.objects.create(name="test_group")
        # Ensure no queries are skipped due to cached content type for Group.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class CSSTest(TestCase):
    """CSS class names emitted by the admin templates."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def setUp(self):
        # Every test runs as the superuser.
        self.client.force_login(self.superuser)
    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')
    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<body class=" dashboard app-admin_views')
    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data)
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(response, '<th class="field-name">')
        self.assertContains(response, '<td class="field-release_date nowrap">')
        self.assertContains(response, '<td class="action-checkbox">')
# docutils is an optional dependency; AdminDocsTest below is skipped when it
# isn't installed.
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF='admin_views.urls')
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    """Smoke tests for the admindocs tag and filter reference pages."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_tags(self):
        """Built-in, app, and admin template tags appear on the tags page."""
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
    def test_filters(self):
        """Built-in template filters appear on the filters page."""
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    ROOT_URLCONF='admin_views.urls',
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """Admin markup validity checks."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_lang_name_present(self):
        # With USE_I18N=False (class decorator above), empty lang/xml:lang
        # attributes must not appear in the rendered markup.
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertNotContains(response, ' lang=""')
        self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        # One superuser shared by all tests in this class.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
    def setUp(self):
        # Each test runs with the superuser logged in.
        self.client.force_login(self.superuser)
    def tearDown(self):
        # Reset cached format values so the settings overridden on this class
        # don't leak into subsequent tests.
        formats.reset_format_cache()
    def assert_non_localized_year(self, response, year):
        """Ensure that the year is not localized with
        USE_THOUSAND_SEPARATOR. Refs #15234.
        """
        # number_format() would insert a thousand separator (e.g. "2,000").
        self.assertNotContains(response, formats.number_format(year))
    def assert_contains_year_link(self, response, date):
        """Assert a year-level drilldown link for *date* is in the response."""
        self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
response, '?release_date__month=%d&release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
response, '?release_date__day=%d&'
'release_date__month=%d&release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
Ensure that no date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Ensure that single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
    """Month-level links appear when all objects share a single year."""
    dates = (
        datetime.date(2000, 1, 30),
        datetime.date(2000, 3, 15),
        datetime.date(2000, 5, 3),
    )
    for release in dates:
        Podcast.objects.create(release_date=release)
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # Drill-down stops at month level here, so no day links may appear.
    self.assertNotContains(response, 'release_date__day=')
    for release in dates:
        self.assert_contains_month_link(response, release)
    self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
    """
    Ensure that year-level links appear for year-spanning changelist.
    """
    DATES = (datetime.date(2001, 1, 30),
             datetime.date(2003, 3, 15),
             datetime.date(2005, 5, 3))
    for date in DATES:
        Podcast.objects.create(release_date=date)
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # No day/month-level links at the top (year) level of the hierarchy.
    self.assertNotContains(response, 'release_date__day=')
    self.assertNotContains(response, 'release_date__month=')
    for date in DATES:
        self.assert_contains_year_link(response, date)
    # and make sure GET parameters still behave correctly
    for date in DATES:
        url = '%s?release_date__year=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year)
        response = self.client.get(url)
        self.assert_contains_month_link(response, date)
        # Check every year that actually occurs in DATES. The original
        # asserted 2001's check against 2000, a year that never appears
        # in the data, which made that assertion vacuous.
        self.assert_non_localized_year(response, 2001)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
        url = '%s?release_date__year=%d&release_date__month=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year, date.month)
        response = self.client.get(url)
        self.assert_contains_day_link(response, date)
        self.assert_non_localized_year(response, 2001)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
def test_related_field(self):
    """Year links for a related-field date hierarchy only appear for
    years that actually have matching rows."""
    fixtures = (
        # (posted date, number of answers)
        (datetime.date(2001, 1, 30), 0),
        (datetime.date(2003, 3, 15), 1),
        (datetime.date(2005, 5, 3), 2),
    )
    for posted, answer_count in fixtures:
        question = Question.objects.create(posted=posted)
        for _ in range(answer_count):
            question.answer_set.create()
    response = self.client.get(reverse('admin:admin_views_answer_changelist'))
    for posted, answer_count in fixtures:
        year_link = '?question__posted__year=%d"' % (posted.year,)
        if answer_count:
            self.assertContains(response, year_link)
        else:
            self.assertNotContains(response, year_link)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomSaveRelatedTests(TestCase):
    """
    Ensure that one can easily customize the way related objects are saved.
    Refs #16115.

    The custom ModelAdmin appends the parent's family name to each child on
    save; every test below checks that renaming the parent to "Josh Stone"
    leaves both children named "<first> Stone".
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def _assert_stone_family(self):
        """Parent is 'Josh Stone' and both children carry the family name."""
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        form_data = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        self.client.post(reverse('admin:admin_views_parent_add'), form_data)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        self._assert_stone_family()

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        form_data = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), form_data)
        self._assert_stone_family()

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        parent = Parent.objects.create(name='Josh Rock')
        Child.objects.create(parent=parent, name='Paul')
        Child.objects.create(parent=parent, name='Catherine')
        form_data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        self.client.post(reverse('admin:admin_views_parent_changelist'), form_data)
        self._assert_stone_family()
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewLogoutTests(TestCase):
    """Behaviour of the admin logout view for authenticated and anonymous users."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def test_logout(self):
        """A logged-in user sees the logged-out page without user tools."""
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'registration/logged_out.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
        self.assertFalse(response.context['has_permission'])
        # The user-tools div shouldn't be visible once logged out.
        self.assertNotContains(response, 'user-tools')

    def test_client_logout_url_can_be_used_to_login(self):
        """An anonymous request to the logout URL redirects to the login page."""
        logout_url = reverse('admin:logout')
        response = self.client.get(logout_url)
        # We should be redirected to the login page.
        self.assertEqual(response.status_code, 302)
        # Follow the redirect and inspect the login page.
        response = self.client.get(logout_url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
        self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminUserMessageTest(TestCase):
    """Messages emitted by admin actions appear in the followed response."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def _run_action(self, action_name):
        """POST the named changelist action and return the followed response."""
        payload = {
            ACTION_CHECKBOX_NAME: [1],
            'action': action_name,
            'index': 0,
        }
        return self.client.post(
            reverse('admin:admin_views_usermessenger_changelist'),
            payload, follow=True)

    def send_message(self, level):
        """
        Helper that sends a post to the dummy test methods and asserts that a
        message with the level has appeared in the response.
        """
        response = self._run_action('message_%s' % level)
        self.assertContains(response,
                            '<li class="%s">Test %s</li>' % (level, level),
                            html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        response = self._run_action('message_extra_tags')
        self.assertContains(response,
                            '<li class="extra_tag info">Test tags</li>',
                            html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminKeepChangeListFiltersTests(TestCase):
    # Changelist filters must survive round trips through the add, change,
    # delete and history views via the `_changelist_filters` querystring
    # parameter. Refs #22360.
    admin_site = site

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        # Non-staff, non-superuser sample row the filters match against.
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')

    def setUp(self):
        self.client.force_login(self.superuser)

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # `_changelist_filters` is itself an urlencoded querystring; parse
        # it into a dict too so its inner ordering doesn't matter either.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        # Note: scheme and host are deliberately ignored (only path compared).
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        # Self-test for the assertURLEqual helper above.
        # Test equality.
        change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,))
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            )
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                    change_user_url
                ),
                'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
                    change_user_url
                )
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
            '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
        )

    def get_changelist_filters(self):
        """The raw filter dict applied to the changelist in these tests."""
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        """Filters urlencoded as they appear on the changelist URL."""
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        """Filters double-encoded under `_changelist_filters`, as carried
        by add/change/delete/history URLs."""
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        return self.joepublicuser.pk

    def get_changelist_url(self):
        """Changelist URL with the filters applied directly."""
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        """Add view URL carrying the preserved filters."""
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        """Change view URL for *user_id* carrying the preserved filters."""
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        """History view URL for *user_id* carrying the preserved filters."""
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        """Delete view URL for *user_id* carrying the preserved filters."""
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save".
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save".
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy2').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete".
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        # add_preserved_filters() must produce the same URL with or without
        # a script prefix in force.
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        with override_script_prefix('/prefix/'):
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    # Re-run the entire filter-preservation suite against the second,
    # namespaced admin site so current_app handling is covered too.
    admin_site = site2
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestLabelVisibility(TestCase):
    """ #11277 -Labels of hidden fields in admin were not hidden. """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    # -- assertion helpers ---------------------------------------------

    def assert_field_visible(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s">' % field_name)

    def assert_field_hidden(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)

    def assert_fieldline_visible(self, response):
        self.assertContains(response, '<div class="form-row field-first field-second">')

    def assert_fieldline_hidden(self, response):
        self.assertContains(response, '<div class="form-row hidden')

    # -- tests ---------------------------------------------------------

    def test_all_fields_visible(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_visible(response, 'first')
        self.assert_field_visible(response, 'second')

    def test_all_fields_hidden(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
        self.assert_fieldline_hidden(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_hidden(response, 'second')

    def test_mixin(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_visible(response, 'second')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewOnSiteTests(TestCase):
    # Covers the admin.E025 system check for ModelAdmin.view_on_site, the
    # rendering of the "View on site" link, and (first two tests) inline
    # formset validation when the parent form fails. Refs #20522.
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        # State -> City -> Restaurant -> Worker fixture chain.
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_add_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test1",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": "",
                     "dependentchild_set-0-family_name": "Test2"}
        response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
                                    post_data)
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_change_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
                                                          family_name="Test1")
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test2",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": str(pwdc.id),
                     "dependentchild_set-0-family_name": "Test1"}
        response = self.client.post(
            reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
        )
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_check(self):
        "Ensure that the view_on_site value is either a boolean or a callable"
        # NOTE: this mutates the CityAdmin *class* attribute; the finally
        # block restores it so other tests see the original configuration.
        try:
            admin = CityAdmin(City, AdminSite())
            CityAdmin.view_on_site = True
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = False
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = []
            self.assertEqual(admin.check(), [
                Error(
                    "The value of 'view_on_site' must be a callable or a boolean value.",
                    obj=CityAdmin,
                    id='admin.E025',
                ),
            ])
        finally:
            # Restore the original values for the benefit of other tests.
            CityAdmin.view_on_site = True

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        # NOTE(review): hardcodes object pk 1 instead of self.r1.pk —
        # presumably relies on r1 being the first Restaurant; confirm.
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "Ensure that the default behavior is followed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
        self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))

    def test_missing_get_absolute_url(self):
        "Ensure None is returned if model doesn't have get_absolute_url"
        model_admin = ModelAdmin(Worker, None)
        self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF='admin_views.urls')
class InlineAdminViewOnSiteTest(TestCase):
    """'View on site' links rendered for inline rows on change views."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        change_url = reverse('admin:admin_views_state_change', args=(self.s1.pk,))
        response = self.client.get(change_url)
        city_ct_pk = ContentType.objects.get_for_model(City).pk
        link = reverse('admin:view_on_site', args=(city_ct_pk, self.c1.pk))
        self.assertNotContains(response, link)

    def test_true(self):
        "Ensure that the 'View on site' button is displayed if view_on_site is True"
        change_url = reverse('admin:admin_views_city_change', args=(self.c1.pk,))
        response = self.client.get(change_url)
        restaurant_ct_pk = ContentType.objects.get_for_model(Restaurant).pk
        link = reverse('admin:view_on_site', args=(restaurant_ct_pk, self.r1.pk))
        self.assertContains(response, link)

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        change_url = reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))
        response = self.client.get(change_url)
        self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestEtagWithAdminView(SimpleTestCase):
    # See https://code.djangoproject.com/ticket/16003
    def _assert_etag_header(self, expected):
        """Hit the admin index (anonymous -> 302) and check ETag presence."""
        response = self.client.get(reverse('admin:index'))
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.has_header('ETag'), expected)

    def test_admin(self):
        with self.settings(USE_ETAGS=False):
            self._assert_etag_header(False)
        with self.settings(USE_ETAGS=True):
            self._assert_etag_header(True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class GetFormsetsWithInlinesArgumentTest(TestCase):
    """
    #23934 - When adding a new model instance in the admin, the 'obj' argument
    of get_formsets_with_inlines() should be None. When changing, it should be
    equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def _post_and_expect_redirect(self, url_name, name, args=None):
        """POST {'name': name} to the named admin view; a 302 means the
        ModelAdmin's obj-argument checks did not raise."""
        response = self.client.post(reverse(url_name, args=args), {'name': name})
        self.assertEqual(response.status_code, 302)

    def test_explicitly_provided_pk(self):
        self._post_and_expect_redirect('admin:admin_views_explicitlyprovidedpk_add', '1')
        self._post_and_expect_redirect('admin:admin_views_explicitlyprovidedpk_change', '2', args=(1,))

    def test_implicitly_generated_pk(self):
        self._post_and_expect_redirect('admin:admin_views_implicitlygeneratedpk_add', '1')
        self._post_and_expect_redirect('admin:admin_views_implicitlygeneratedpk_change', '2', args=(1,))
| sgzsh269/django | tests/admin_views/tests.py | Python | bsd-3-clause | 281,730 |
from __future__ import print_function, division, absolute_import
from .fbresnet import fbresnet152
from .cafferesnet import cafferesnet101
from .bninception import bninception
from .resnext import resnext101_32x4d
from .resnext import resnext101_64x4d
from .inceptionv4 import inceptionv4
from .inceptionresnetv2 import inceptionresnetv2
from .nasnet import nasnetalarge
from .nasnet_mobile import nasnetamobile
from .torchvision_models import alexnet
from .torchvision_models import densenet121
from .torchvision_models import densenet169
from .torchvision_models import densenet201
from .torchvision_models import densenet161
from .torchvision_models import resnet18
from .torchvision_models import resnet34
from .torchvision_models import resnet50
from .torchvision_models import resnet101
from .torchvision_models import resnet152
from .torchvision_models import inceptionv3
from .torchvision_models import squeezenet1_0
from .torchvision_models import squeezenet1_1
from .torchvision_models import vgg11
from .torchvision_models import vgg11_bn
from .torchvision_models import vgg13
from .torchvision_models import vgg13_bn
from .torchvision_models import vgg16
from .torchvision_models import vgg16_bn
from .torchvision_models import vgg19_bn
from .torchvision_models import vgg19
from .dpn import dpn68
from .dpn import dpn68b
from .dpn import dpn92
from .dpn import dpn98
from .dpn import dpn131
from .dpn import dpn107
from .xception import xception
from .senet import senet154
from .senet import se_resnet50
from .senet import se_resnet101
from .senet import se_resnet152
from .senet import se_resnext50_32x4d
from .senet import se_resnext101_32x4d
from .pnasnet import pnasnet5large
from .polynet import polynet
| Cadene/pretrained-models.pytorch | pretrainedmodels/models/__init__.py | Python | bsd-3-clause | 1,735 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Package metadata, read by setup.py and available to client code.
__author__ = 'Daniel Greenfeld'
__email__ = '[email protected]'
__version__ = '3.2.1'
from .client import Eventbrite # noqa
from .utils import EVENTBRITE_API_URL # noqa
| apranav19/eventbrite-sdk-python | eventbrite/__init__.py | Python | apache-2.0 | 241 |
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.exos import exos_command
from units.modules.utils import set_module_args
from .exos_module import TestExosModule, load_fixture
class TestExosCommandModule(TestExosModule):
    # Unit tests for the exos_command Ansible module; device output is
    # served from fixture files instead of a real EXOS switch.
    module = exos_command

    def setUp(self):
        super(TestExosCommandModule, self).setUp()
        # Patch run_commands so no real device connection is attempted.
        self.mock_run_commands = patch('ansible.modules.network.exos.exos_command.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        super(TestExosCommandModule, self).tearDown()
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):
        def load_from_file(*args, **kwargs):
            # run_commands(module, commands) -> list of per-command output.
            module, commands = args
            output = list()
            for item in commands:
                try:
                    # Commands may arrive JSON-encoded with the real command
                    # under the 'command' key; fall back to the raw string.
                    obj = json.loads(item['command'])
                    command = obj['command']
                except ValueError:
                    command = item['command']
                # Fixture files are named after the command with spaces
                # replaced by underscores (e.g. "show version" -> show_version).
                filename = str(command).replace(' ', '_')
                output.append(load_fixture(filename))
            return output
        self.run_commands.side_effect = load_from_file

    def test_exos_command_simple(self):
        set_module_args(dict(commands=['show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout']), 1)
        self.assertTrue(result['stdout'][0].startswith('Switch :'))

    def test_exos_command_multiple(self):
        set_module_args(dict(commands=['show version', 'show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout']), 2)
        self.assertTrue(result['stdout'][0].startswith('Switch :'))

    def test_exos_command_wait_for(self):
        wait_for = 'result[0] contains "Switch :"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module()

    def test_exos_command_wait_for_fails(self):
        # Unsatisfiable condition: the module retries the default 10 times.
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 10)

    def test_exos_command_retries(self):
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 2)

    def test_exos_command_match_any(self):
        wait_for = ['result[0] contains "Switch"',
                    'result[0] contains "test string"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
        self.execute_module()

    def test_exos_command_match_all(self):
        wait_for = ['result[0] contains "Switch"',
                    'result[0] contains "Switch :"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
        self.execute_module()

    def test_exos_command_match_all_failure(self):
        wait_for = ['result[0] contains "Switch :"',
                    'result[0] contains "test string"']
        commands = ['show version', 'show version']
        set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
        self.execute_module(failed=True)

    def test_exos_command_configure_error(self):
        # Non-show commands are rejected in check mode with a warning.
        commands = ['disable ospf']
        set_module_args({
            'commands': commands,
            '_ansible_check_mode': True,
        })
        result = self.execute_module()
        self.assertEqual(
            result['warnings'],
            ['only show commands are supported when using check mode, not executing `disable ospf`']
        )
| hryamzik/ansible | test/units/modules/network/exos/test_exos_command.py | Python | gpl-3.0 | 4,593 |
# -*- coding: utf-8 -*-
# Copyright 2019 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import (
djbc_kite_lap_pemasukan_hasil_produksi,
djbc_kite_lap_pemasukan_hasil_produksi_subkon,
djbc_kite_lap_pemasukan_hasil_produksi_union,
)
| open-synergy/opnsynid-l10n-indonesia | l10n_id_djbc_kite_lap_pemasukan_hasil_produksi/reports/__init__.py | Python | agpl-3.0 | 291 |
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
# App settings, each overridable from the Django project settings.
# If True, purge in small batches with a pause between them instead of one
# bulk delete — presumably to limit DB load; confirm against datapurge usage.
DATAPURGE_GRACEFULLY = getattr(settings, "DATAPURGE_GRACEFULLY", False)
# Batch size used when purging gracefully (default: 100 rows per step).
DATAPURGE_GRACE_STEP = getattr(settings, "DATAPURGE_GRACE_STEP", 100)
# Pause between graceful-purge batches (default: 0.3 seconds).
DATAPURGE_GRACE_WAIT = getattr(settings, "DATAPURGE_GRACE_WAIT", datetime.timedelta(seconds=0.3))
# Mapping of models to purge; empty by default so nothing is purged unless
# the project opts in. NOTE(review): schema of the mapping is defined by the
# consuming code, not visible here.
DATAPURGE_MODELS = getattr(settings, "DATAPURGE_MODELS", {})
# Load libraries
import sys
# Load modules
from modules.can_access import can_access
from modules.check_post_action import check_post_action
from modules.has_password_field import has_password_field
from modules.html_has_same_domain import html_has_same_domain
from modules.is_masquerading import is_masquerading
from modules.naver.uses_stylesheet_naver import uses_stylesheet_naver
from modules.naver.check_title import check_title
from modules.has_correct_favicon import has_correct_favicon
# Define constants
# NOTE: all paths are relative to the current working directory, which is why
# calculate_features() fails when invoked from a different path.
TRAINING_SET = "files/training_set.csv"  # labelled input rows: url,is_phishing
TRAINING_SET_ANALYZED = "files/training_set_analyzed.csv"  # output feature matrix (CSV)
LOG = "logs/log"  # progress log, appended to on every run
ERROR_LOG = "logs/error_log"  # per-row failures, appended to on every run
def calculate_features():
    """Compute phishing-detection features for every URL in TRAINING_SET.

    Reads the labelled CSV (``url,is_phishing`` with a header row), runs each
    feature module against the URL, and writes the numerically encoded
    results as CSV rows to TRAINING_SET_ANALYZED.  Progress is appended to
    LOG; rows that raise are recorded in ERROR_LOG and skipped so that one
    bad URL does not abort the whole run.
    """
    import traceback  # local import: only needed for error reporting below

    # Numeric encoding of the verdict codes returned by the feature modules.
    # Codes not listed here fall back to "0" (same as "U").
    encoding = {
        "U": "0",
        "SL": "-0.5",
        "S": "-1",
        "PL": "0.5",
        "P": "1",
    }

    def write_encoded_features(write_file, result):
        # Each feature value is appended as one extra CSV column.
        write_file.write("," + encoding.get(result, "0"))

    with open(TRAINING_SET_ANALYZED, "w") as ts_analyzed:  # this causes error when calling from a different path
        # write header: url, label, then one column per feature computed below
        ts_analyzed.write("url,is_phishing,is_masquerading,html_has_same_domain,has_password_field,check_post_action\n")
        with open(TRAINING_SET, "r") as training_set:
            lines = training_set.readlines()
            counter = 0
            can_access_error_count = 0
            for line in lines[1:]:  # skip the CSV header row
                try:
                    line = line.rstrip()
                    url = line.split(",")[0]
                    is_phishing = line.split(",")[1]
                    counter += 1
                    with open(LOG, "a+") as log:
                        log.write("Count " + str(counter) + ": " + url + "\n")
                    print("Count " + str(counter) + ": " + url)
                    # if web page cannot be accessed, other modules will not work
                    result, resp, mod = can_access(url)
                    if result != "U":
                        ts_analyzed.write(url)
                        ts_analyzed.write("," + is_phishing)
                        # calculate each feature
                        result, mod = is_masquerading(url)
                        write_encoded_features(ts_analyzed, result)
                        result, mod = html_has_same_domain(url, resp)
                        write_encoded_features(ts_analyzed, result)
                        result, mod = has_password_field(resp)
                        write_encoded_features(ts_analyzed, result)
                        # Naver-specific features (uses_stylesheet_naver,
                        # check_title, has_correct_favicon) are currently
                        # disabled and intentionally not written.
                        result, mod = check_post_action(resp)
                        write_encoded_features(ts_analyzed, result)
                        ts_analyzed.write("\n")
                    else:
                        can_access_error_count += 1
                        with open(LOG, "a+") as log:
                            log.write("can_access error\n")
                        print("can_access error")
                except Exception:
                    # BUG FIX: the original wrote sys.exc_info()[0] (a type
                    # object) to the file, which itself raises TypeError, so
                    # the error log never recorded the cause.  Also narrowed
                    # from a bare except so Ctrl-C still aborts the run.
                    with open(ERROR_LOG, "a+") as error_log:
                        error_log.write("Count " + str(counter) + ": " + url + "\n")
                        error_log.write(traceback.format_exc())
if __name__ == "__main__":
    calculate_features()
#!/usr/bin/env python
import sys
from gsp import GSP
from nautilusbb import Nautilusbb
from util import argmax_index
class Highend(Nautilusbb):
    """Bidding agent that shades its bid early and bids at full value late.

    For all but the last RANGE rounds it bids half of the balanced bid
    computed by Nautilusbb; in the closing rounds — when other agents are
    more likely to be out of money — it bids the full balanced amount.
    """

    TOTALROUNDS = 48  # total number of auction rounds in a game
    RANGE = 6         # number of closing rounds in which we bid full value

    def __init__(self, id, value, budget):
        super(Highend, self).__init__(id, value, budget)

    def bid(self, t, history, reserve):
        """Return this round's bid (half the balanced bid until the end-game)."""
        # Fixed: removed the unused `midpoint`/`error` locals and hoisted the
        # duplicated superclass bid computation.
        base_bid = super(Highend, self).bid(t, history, reserve)
        if t > self.TOTALROUNDS - self.RANGE:
            return base_bid
        return 0.5 * base_bid

    def __repr__(self):
        return "%s(id=%d, value=%d)" % (
            self.__class__.__name__, self.id, self.value)
| kandluis/balancedBidding | pset5_code/highend.py | Python | apache-2.0 | 854 |
# Define a procedure, sum3, that takes three
# inputs, and returns the sum of the three
# input numbers.
def sum3(aa, bb, cc):
    """Return the sum of the three inputs.

    >>> sum3(1, 2, 3)
    6
    >>> sum3(93, 53, 70)
    216
    """
    return aa + bb + cc
| ezralalonde/cloaked-octo-sansa | 02/qu/02.py | Python | bsd-2-clause | 211 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from setuptools import setup
# Packaging metadata for the mdx_alerts Markdown extension.
setup(
    name = "mdx_alerts",
    version = "0.1.0",
    py_modules = ["mdx_alerts"],  # single-module distribution, no package dir
    install_requires = ['Markdown>=2.2.0'],  # extension API requires Markdown 2.2+
    author = "Saevon",
    author_email = "[email protected]",
    description = "Markdown alerts extension",
    license = "MIT",
    url = "https://github.com/saevon/markdown-alerts",
    keywords = "markdown twitter bootstrap alerts",
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Text Processing :: Filters',
        'Topic :: Text Processing :: Markup :: HTML'
    ],
)
| Saevon/mdx_alerts | setup.py | Python | mit | 877 |
from django.shortcuts import render
from django.views import generic
from django.http import HttpResponse, HttpResponseRedirect
# from django.views.decorators.csrf import csrf_exempt
import time
import json
import codecs
class IndexViews(generic.View):
    """Render the Turanga landing page."""

    templates_file = 'turanga.html'

    def get(self, request):
        # No dynamic data yet: render the template with an empty context.
        return render(request, self.templates_file, {})
def crash(request):
    """Receive a crash report via POST and archive it under static/json/.

    Expects POST fields ``log`` (a JSON document) and ``platform``; the
    parsed report is written to ``static/json/<platform><timestamp>.json``.
    Returns a plain-text HttpResponse describing the outcome.
    """
    print(request)  # debug trace of the incoming request
    if request.method == "POST":
        if 'log' in request.POST and request.POST['log']:
            log = request.POST["log"]
        else:
            return HttpResponse("no log!")
        if 'platform' in request.POST and request.POST['platform']:
            # SECURITY FIX: strip path separators so a malicious ``platform``
            # value (e.g. "../../etc") cannot escape the static/json/ dir.
            service = request.POST['platform'].replace('/', '').replace('\\', '')
        else:
            return HttpResponse("no platform!")
        pub_date = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        req = json.loads(log)
        print(req)
        with codecs.open('static/json/' + service + pub_date + '.json', 'w', 'utf-8') as f:
            # BUG FIX: dump the parsed report (req), not the raw string (log),
            # so the stored file holds actual JSON rather than one
            # double-encoded JSON string.
            f.write(json.dumps(req, ensure_ascii=False, indent=1))
        return HttpResponse("success!")
    else:
        return HttpResponse("request is not POST!")
#!env python
import os
import sys
# Make the bundled Splunk Python SDK importable; when SPLUNK_HOME is not set
# in the environment, fall back to the hard-coded default install location.
sys.path.append(
    os.path.join(
        os.environ.get( "SPLUNK_HOME", "/opt/splunk/6.1.3" ),
        "etc/apps/framework/contrib/splunk-sdk-python/1.3.0",
    )
)
from collections import Counter, OrderedDict
from math import log
from nltk import tokenize
import execnet
import json
from splunklib.searchcommands import Configuration, Option
from splunklib.searchcommands import dispatch, validators
from remote_commands import OptionRemoteStreamingCommand, ValidateLocalFile
# Splunk custom search command that streams events to a remote execnet worker,
# applies a previously trained scikit-learn classifier, and streams each event
# back with a "<target>_predicted" field added.
@Configuration(clear_required_fields=False)
class MCPredict(OptionRemoteStreamingCommand):
    # Path to the pickled model bundle (dict with encoder/est/target/fields),
    # validated to live under the app's classifiers/ directory.
    model = Option(require=True, validate=ValidateLocalFile(mode='r',extension="pkl",subdir='classifiers',nohandle=True))
    # Python 2 source executed on the remote execnet gateway — this is a
    # runtime string, not dead code; it receives records over the channel,
    # loads the model (plus its optional lsi/tfidf text model), predicts,
    # and sends the augmented records back.
    code = """
import os, sys, itertools, collections, numbers
try:
    import cStringIO as StringIO
except:
    import StringIO
import numpy as np
import scipy.sparse as sp
from multiclassify import process_records
from gensim.models import LsiModel, TfidfModel, LdaModel
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import LabelEncoder
from sklearn.externals import joblib
if __name__ == "__channelexec__":
    args = channel.receive()
    records = []
    for record in channel:
        if not record:
            break
        records.append(record)
    if records:
        records = np.array(records)
        # Try loading existing model
        try:
            model = joblib.load(args['model'])
            encoder = model['encoder']
            est = model['est']
            target = model['target']
            fields = model['fields']
            if model.get('text'):
                if model['text'] == 'lsi':
                    textmodel = LsiModel.load(args['model'].replace(".pkl",".%s" % model['text']))
                elif model['text'] == 'tfidf':
                    textmodel = TfidfModel.load(args['model'].replace(".pkl",".%s" % model['text']))
                else:
                    textmodel = model['text']
        except Exception as e:
            print >> sys.stderr, "ERROR", e
            channel.send({ 'error': "Couldn't find model %s" % args['model']})
        else:
            X, y_labels, textmodel = process_records(records, fields, target, textmodel=textmodel)
            print >> sys.stderr, X.shape
            y = est.predict(X)
            y_labels = encoder.inverse_transform(y)
            for i, record in enumerate(records):
                record['%s_predicted' % target] = y_labels.item(i)
                channel.send(record)
    """
    def __dir__(self):
        # Restrict introspection to the single supported option.
        return ['model']
# Hand control to the splunklib search-command dispatcher.
dispatch(MCPredict, sys.argv, sys.stdin, sys.stdout, __name__)
| nlproc/splunkml | bin/mcpredict.py | Python | apache-2.0 | 2,357 |
# -*- coding: utf-8 -*-
import logging
try:
from cStringIO import StringIO # NOQA
except ImportError:
from io import StringIO # NOQA
try:
import importlib # NOQA
except ImportError:
from django.utils import importlib # NOQA
from django.core.management import call_command
from django.test import TestCase
class MockLoggingHandler(logging.Handler):
    """In-memory logging handler that records formatted messages per level."""

    # Level names used as keys of the ``messages`` mapping.
    _LEVELS = ('debug', 'info', 'warning', 'error', 'critical')

    def __init__(self, *args, **kwargs):
        self.reset()
        logging.Handler.__init__(self, *args, **kwargs)

    def emit(self, record):
        # Store the fully formatted message under its lower-cased level name.
        level = record.levelname.lower()
        self.messages[level].append(record.getMessage())

    def reset(self):
        """Discard everything captured so far."""
        self.messages = {level: [] for level in self._LEVELS}
class CommandTest(TestCase):
    def test_error_logging(self):
        # Ensure command errors are properly logged and reraised
        from django_extensions.management.base import logger
        logger.addHandler(MockLoggingHandler())
        module_path = "django_extensions.tests.management.commands.error_raising_command"
        module = importlib.import_module(module_path)
        error_raising_command = module.Command()
        self.assertRaises(Exception, error_raising_command.execute)
        # NOTE(review): assumes the mock handler is the logger's first
        # handler — holds only if nothing registered a handler earlier.
        handler = logger.handlers[0]
        self.assertEqual(len(handler.messages['error']), 1)
class ShowTemplateTagsTests(TestCase):
    def test_some_output(self):
        # Capture the command's stdout in memory instead of printing it.
        out = StringIO()
        call_command('show_templatetags', stdout=out)
        output = out.getvalue()
        # Once django_extension is installed during tests it should appear with
        # its templatetags
        self.assertIn('django_extensions', output)
        # let's check at least one
        self.assertIn('truncate_letters', output)
| WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/django_extensions/tests/management_command.py | Python | bsd-3-clause | 1,869 |
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from git.test.lib import *
from git.exc import *
from git.objects.submodule.base import Submodule
from git.objects.submodule.root import RootModule, RootUpdateProgress
from git.util import to_native_path_linux, join_path_native
import shutil
import git
import sys
import os
# Change the configuration if possible to prevent the underlying memory manager
# to keep file handles open. On windows we get problems as they are not properly
# closed due to mmap bugs on windows (as it appears)
if sys.platform == 'win32':
    try:
        import smmap.util
        # Presumably makes smmap read files into memory rather than mmap them,
        # so no OS file handles stay open on Windows — confirm against smmap docs.
        smmap.util.MapRegion._test_read_into_memory = True
    except ImportError:
        sys.stderr.write("The submodule tests will fail as some files cannot be removed due to open file handles.\n")
        sys.stderr.write("The latest version of gitdb uses a memory map manager which can be configured to work around this problem")
    #END handle windows platform
class TestRootProgress(RootUpdateProgress):
    """Just prints messages, for now without checking the correctness of the states"""
    def update(self, op, index, max_count, message=''):
        print message
# Shared progress instance handed to the submodule update calls below.
prog = TestRootProgress()
class TestSubmodule(TestBase):
    """End-to-end tests of the Submodule and RootModule APIs against
    read-write clones (bare and non-bare)."""

    # Commits/tags of the fixture repository used as parent commits:
    # current submodule state, a changed state, and a tag with no submodules.
    k_subm_current = "468cad66ff1f80ddaeee4123c24e4d53a032c00d"
    k_subm_changed = "394ed7006ee5dc8bddfd132b64001d5dfc0ffdd3"
    k_no_subm_tag = "0.1.6"
    def _do_base_tests(self, rwrepo):
        """Perform all tests in the given repository, it may be bare or nonbare"""
        # manual instantiation
        smm = Submodule(rwrepo, "\0"*20)
        # name needs to be set in advance
        self.failUnlessRaises(AttributeError, getattr, smm, 'name')
        # iterate - 1 submodule
        sms = Submodule.list_items(rwrepo, self.k_subm_current)
        assert len(sms) == 1
        sm = sms[0]
        # at a different time, there is None
        assert len(Submodule.list_items(rwrepo, self.k_no_subm_tag)) == 0
        assert sm.path == 'git/ext/gitdb'
        assert sm.path != sm.name                   # in our case, we have ids there, which don't equal the path
        assert sm.url == 'git://github.com/gitpython-developers/gitdb.git'
        assert sm.branch_path == 'refs/heads/master'            # the default ...
        assert sm.branch_name == 'master'
        assert sm.parent_commit == rwrepo.head.commit
        # size is always 0
        assert sm.size == 0
        # the module is not checked-out yet
        self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
        # which is why we can't get the branch either - it points into the module() repository
        self.failUnlessRaises(InvalidGitRepositoryError, getattr, sm, 'branch')
        # branch_path works, as its just a string
        assert isinstance(sm.branch_path, basestring)
        # some commits earlier we still have a submodule, but its at a different commit
        smold = Submodule.iter_items(rwrepo, self.k_subm_changed).next()
        assert smold.binsha != sm.binsha
        assert smold != sm                  # the name changed
        # force it to reread its information
        del(smold._url)
        # NOTE(review): the next line compares but discards the result — it
        # looks like a missing assert.
        smold.url == sm.url
        # test config_reader/writer methods
        sm.config_reader()
        new_smclone_path = None             # keep custom paths for later
        new_csmclone_path = None                #
        if rwrepo.bare:
            self.failUnlessRaises(InvalidGitRepositoryError, sm.config_writer)
        else:
            writer = sm.config_writer()
            # for faster checkout, set the url to the local path
            new_smclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path))
            writer.set_value('url', new_smclone_path)
            del(writer)
            assert sm.config_reader().get_value('url') == new_smclone_path
            assert sm.url == new_smclone_path
        # END handle bare repo
        smold.config_reader()
        # cannot get a writer on historical submodules
        if not rwrepo.bare:
            self.failUnlessRaises(ValueError, smold.config_writer)
        # END handle bare repo
        # make the old into a new - this doesn't work as the name changed
        prev_parent_commit = smold.parent_commit
        self.failUnlessRaises(ValueError, smold.set_parent_commit, self.k_subm_current)
        # the sha is properly updated
        smold.set_parent_commit(self.k_subm_changed+"~1")
        assert smold.binsha != sm.binsha
        # raises if the sm didn't exist in new parent - it keeps its
        # parent_commit unchanged
        self.failUnlessRaises(ValueError, smold.set_parent_commit, self.k_no_subm_tag)
        # TEST TODO: if a path in the gitmodules file, but not in the index, it raises
        # TEST UPDATE
        ##############
        # module retrieval is not always possible
        if rwrepo.bare:
            self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
            self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
            self.failUnlessRaises(InvalidGitRepositoryError, sm.add, rwrepo, 'here', 'there')
        else:
            # its not checked out in our case
            self.failUnlessRaises(InvalidGitRepositoryError, sm.module)
            assert not sm.module_exists()
            # currently there is only one submodule
            assert len(list(rwrepo.iter_submodules())) == 1
            assert sm.binsha != "\0"*20
            # TEST ADD
            ###########
            # preliminary tests
            # adding existing returns exactly the existing
            sma = Submodule.add(rwrepo, sm.name, sm.path)
            assert sma.path == sm.path
            # no url and no module at path fails
            self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", "pathtorepo", url=None)
            # CONTINUE UPDATE
            #################
            # lets update it - its a recursive one too
            newdir = os.path.join(sm.abspath, 'dir')
            os.makedirs(newdir)
            # update fails if the path already exists non-empty
            self.failUnlessRaises(OSError, sm.update)
            os.rmdir(newdir)
            # dry-run does nothing
            sm.update(dry_run=True, progress=prog)
            assert not sm.module_exists()
            assert sm.update() is sm
            sm_repopath = sm.path               # cache for later
            assert sm.module_exists()
            assert isinstance(sm.module(), git.Repo)
            assert sm.module().working_tree_dir == sm.abspath
            # INTERLEAVE ADD TEST
            #####################
            # url must match the one in the existing repository ( if submodule name suggests a new one )
            # or we raise
            self.failUnlessRaises(ValueError, Submodule.add, rwrepo, "newsubm", sm.path, "git://someurl/repo.git")
            # CONTINUE UPDATE
            #################
            # we should have setup a tracking branch, which is also active
            assert sm.module().head.ref.tracking_branch() is not None
            # delete the whole directory and re-initialize
            shutil.rmtree(sm.abspath)
            assert len(sm.children()) == 0
            # dry-run does nothing
            sm.update(dry_run=True, recursive=False, progress=prog)
            assert len(sm.children()) == 0
            sm.update(recursive=False)
            assert len(list(rwrepo.iter_submodules())) == 2
            assert len(sm.children()) == 1          # its not checked out yet
            csm = sm.children()[0]
            assert not csm.module_exists()
            csm_repopath = csm.path
            # adjust the path of the submodules module to point to the local destination
            new_csmclone_path = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path, csm.path))
            csm.config_writer().set_value('url', new_csmclone_path)
            assert csm.url == new_csmclone_path
            # dry-run does nothing
            assert not csm.module_exists()
            sm.update(recursive=True, dry_run=True, progress=prog)
            assert not csm.module_exists()
            # update recursively again
            sm.update(recursive=True)
            assert csm.module_exists()
            # tracking branch once again
            # NOTE(review): result discarded — missing assert around the
            # tracking_branch() check below.
            csm.module().head.ref.tracking_branch() is not None
            # this flushed in a sub-submodule
            assert len(list(rwrepo.iter_submodules())) == 2
            # reset both heads to the previous version, verify that to_latest_revision works
            smods = (sm.module(), csm.module())
            for repo in smods:
                repo.head.reset('HEAD~2', working_tree=1)
            # END for each repo to reset
            # dry run does nothing
            sm.update(recursive=True, dry_run=True, progress=prog)
            for repo in smods:
                assert repo.head.commit != repo.head.ref.tracking_branch().commit
            # END for each repo to check
            sm.update(recursive=True, to_latest_revision=True)
            for repo in smods:
                assert repo.head.commit == repo.head.ref.tracking_branch().commit
            # END for each repo to check
            del(smods)
            # if the head is detached, it still works ( but warns )
            smref = sm.module().head.ref
            sm.module().head.ref = 'HEAD~1'
            # if there is no tracking branch, we get a warning as well
            csm_tracking_branch = csm.module().head.ref.tracking_branch()
            csm.module().head.ref.set_tracking_branch(None)
            sm.update(recursive=True, to_latest_revision=True)
            # to_latest_revision changes the child submodule's commit, it needs an
            # update now
            csm.set_parent_commit(csm.repo.head.commit)
            # undo the changes
            sm.module().head.ref = smref
            csm.module().head.ref.set_tracking_branch(csm_tracking_branch)
            # REMOVAL OF REPOSITOTRY
            ########################
            # must delete something
            self.failUnlessRaises(ValueError, csm.remove, module=False, configuration=False)
            # We have modified the configuration, hence the index is dirty, and the
            # deletion will fail
            # NOTE: As we did a few updates in the meanwhile, the indices were reset
            # Hence we create some changes
            csm.set_parent_commit(csm.repo.head.commit)
            sm.config_writer().set_value("somekey", "somevalue")
            csm.config_writer().set_value("okey", "ovalue")
            self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
            # if we remove the dirty index, it would work
            sm.module().index.reset()
            # still, we have the file modified
            self.failUnlessRaises(InvalidGitRepositoryError, sm.remove, dry_run=True)
            sm.module().index.reset(working_tree=True)
            # enforce the submodule to be checked out at the right spot as well.
            csm.update()
            # this would work
            assert sm.remove(dry_run=True) is sm
            assert sm.module_exists()
            sm.remove(force=True, dry_run=True)
            assert sm.module_exists()
            # but ... we have untracked files in the child submodule
            fn = join_path_native(csm.module().working_tree_dir, "newfile")
            # NOTE(review): file handle opened here is never closed explicitly.
            open(fn, 'w').write("hi")
            self.failUnlessRaises(InvalidGitRepositoryError, sm.remove)
            # forcibly delete the child repository
            prev_count = len(sm.children())
            assert csm.remove(force=True) is csm
            assert not csm.exists()
            assert not csm.module_exists()
            assert len(sm.children()) == prev_count - 1
            # now we have a changed index, as configuration was altered.
            # fix this
            sm.module().index.reset(working_tree=True)
            # now delete only the module of the main submodule
            assert sm.module_exists()
            sm.remove(configuration=False)
            assert sm.exists()
            assert not sm.module_exists()
            assert sm.config_reader().get_value('url')
            # delete the rest
            sm.remove()
            assert not sm.exists()
            assert not sm.module_exists()
            assert len(rwrepo.submodules) == 0
            # ADD NEW SUBMODULE
            ###################
            # add a simple remote repo - trailing slashes are no problem
            smid = "newsub"
            osmid = "othersub"
            nsm = Submodule.add(rwrepo, smid, sm_repopath, new_smclone_path+"/", None, no_checkout=True)
            assert nsm.name == smid
            assert nsm.module_exists()
            assert nsm.exists()
            # its not checked out
            assert not os.path.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file))
            assert len(rwrepo.submodules) == 1
            # add another submodule, but into the root, not as submodule
            osm = Submodule.add(rwrepo, osmid, csm_repopath, new_csmclone_path, Submodule.k_head_default)
            assert osm != nsm
            assert osm.module_exists()
            assert osm.exists()
            assert os.path.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py'))
            assert len(rwrepo.submodules) == 2
            # commit the changes, just to finalize the operation
            rwrepo.index.commit("my submod commit")
            assert len(rwrepo.submodules) == 2
            # needs update as the head changed, it thinks its in the history
            # of the repo otherwise
            nsm.set_parent_commit(rwrepo.head.commit)
            osm.set_parent_commit(rwrepo.head.commit)
            # MOVE MODULE
            #############
            # invalid inptu
            self.failUnlessRaises(ValueError, nsm.move, 'doesntmatter', module=False, configuration=False)
            # renaming to the same path does nothing
            assert nsm.move(sm.path) is nsm
            # rename a module
            nmp = join_path_native("new", "module", "dir") + "/"                # new module path
            pmp = nsm.path
            abspmp = nsm.abspath
            assert nsm.move(nmp) is nsm
            nmp = nmp[:-1]          # cut last /
            nmpl = to_native_path_linux(nmp)
            assert nsm.path == nmpl
            assert rwrepo.submodules[0].path == nmpl
            mpath = 'newsubmodule'
            absmpath = join_path_native(rwrepo.working_tree_dir, mpath)
            open(absmpath, 'w').write('')
            self.failUnlessRaises(ValueError, nsm.move, mpath)
            os.remove(absmpath)
            # now it works, as we just move it back
            nsm.move(pmp)
            assert nsm.path == pmp
            assert rwrepo.submodules[0].path == pmp
            # TODO lowprio: test remaining exceptions ... for now its okay, the code looks right
            # REMOVE 'EM ALL
            ################
            # if a submodule's repo has no remotes, it can't be added without an explicit url
            osmod = osm.module()
            osm.remove(module=False)
            for remote in osmod.remotes:
                remote.remove(osmod, remote.name)
            assert not osm.exists()
            self.failUnlessRaises(ValueError, Submodule.add, rwrepo, osmid, csm_repopath, url=None)
        # END handle bare mode
        # Error if there is no submodule file here
        self.failUnlessRaises(IOError, Submodule._config_parser, rwrepo, rwrepo.commit(self.k_no_subm_tag), True)
    @with_rw_repo(k_subm_current)
    def test_base_rw(self, rwrepo):
        # Run the base tests against a non-bare read-write clone.
        self._do_base_tests(rwrepo)
    @with_rw_repo(k_subm_current, bare=True)
    def test_base_bare(self, rwrepo):
        # Run the base tests against a bare read-write clone.
        self._do_base_tests(rwrepo)
    @with_rw_repo(k_subm_current, bare=False)
    def test_root_module(self, rwrepo):
        # Can query everything without problems
        rm = RootModule(self.rorepo)
        assert rm.module() is self.rorepo
        # try attributes
        rm.binsha
        rm.mode
        rm.path
        assert rm.name == rm.k_root_name
        assert rm.parent_commit == self.rorepo.head.commit
        rm.url
        rm.branch
        assert len(rm.list_items(rm.module())) == 1
        rm.config_reader()
        rm.config_writer()
        # deep traversal gitdb / async
        rsmsp = [sm.path for sm in rm.traverse()]
        assert len(rsmsp) >= 2          # gitdb and async [and smmap], async being a child of gitdb
        # cannot set the parent commit as root module's path didn't exist
        self.failUnlessRaises(ValueError, rm.set_parent_commit, 'HEAD')
        # TEST UPDATE
        #############
        # setup commit which remove existing, add new and modify existing submodules
        rm = RootModule(rwrepo)
        assert len(rm.children()) == 1
        # modify path without modifying the index entry
        # ( which is what the move method would do properly )
        #==================================================
        sm = rm.children()[0]
        pp = "path/prefix"
        fp = join_path_native(pp, sm.path)
        prep = sm.path
        assert not sm.module_exists()               # was never updated after rwrepo's clone
        # assure we clone from a local source
        sm.config_writer().set_value('url', to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, sm.path)))
        # dry-run does nothing
        sm.update(recursive=False, dry_run=True, progress=prog)
        assert not sm.module_exists()
        sm.update(recursive=False)
        assert sm.module_exists()
        sm.config_writer().set_value('path', fp)            # change path to something with prefix AFTER url change
        # update fails as list_items in such a situations cannot work, as it cannot
        # find the entry at the changed path
        self.failUnlessRaises(InvalidGitRepositoryError, rm.update, recursive=False)
        # move it properly - doesn't work as it its path currently points to an indexentry
        # which doesn't exist ( move it to some path, it doesn't matter here )
        self.failUnlessRaises(InvalidGitRepositoryError, sm.move, pp)
        # reset the path(cache) to where it was, now it works
        sm.path = prep
        sm.move(fp, module=False)           # leave it at the old location
        assert not sm.module_exists()
        cpathchange = rwrepo.index.commit("changed sm path")            # finally we can commit
        # update puts the module into place
        rm.update(recursive=False, progress=prog)
        sm.set_parent_commit(cpathchange)
        assert sm.module_exists()
        # add submodule
        #================
        nsmn = "newsubmodule"
        nsmp = "submrepo"
        async_url = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1]))
        nsm = Submodule.add(rwrepo, nsmn, nsmp, url=async_url)
        csmadded = rwrepo.index.commit("Added submodule").hexsha            # make sure we don't keep the repo reference
        nsm.set_parent_commit(csmadded)
        assert nsm.module_exists()
        # in our case, the module should not exist, which happens if we update a parent
        # repo and a new submodule comes into life
        nsm.remove(configuration=False, module=True)
        assert not nsm.module_exists() and nsm.exists()
        # dry-run does nothing
        rm.update(recursive=False, dry_run=True, progress=prog)
        # otherwise it will work
        rm.update(recursive=False, progress=prog)
        assert nsm.module_exists()
        # remove submodule - the previous one
        #====================================
        sm.set_parent_commit(csmadded)
        smp = sm.abspath
        assert not sm.remove(module=False).exists()
        assert os.path.isdir(smp)           # module still exists
        csmremoved = rwrepo.index.commit("Removed submodule")
        # an update will remove the module
        # not in dry_run
        rm.update(recursive=False, dry_run=True)
        assert os.path.isdir(smp)
        rm.update(recursive=False)
        assert not os.path.isdir(smp)
        # change url
        #=============
        # to the first repository, this way we have a fast checkout, and a completely different
        # repository at the different url
        nsm.set_parent_commit(csmremoved)
        nsmurl = to_native_path_linux(join_path_native(self.rorepo.working_tree_dir, rsmsp[0]))
        nsm.config_writer().set_value('url', nsmurl)
        csmpathchange = rwrepo.index.commit("changed url")
        nsm.set_parent_commit(csmpathchange)
        prev_commit = nsm.module().head.commit
        # dry-run does nothing
        rm.update(recursive=False, dry_run=True, progress=prog)
        assert nsm.module().remotes.origin.url != nsmurl
        rm.update(recursive=False, progress=prog)
        assert nsm.module().remotes.origin.url == nsmurl
        # head changed, as the remote url and its commit changed
        assert prev_commit != nsm.module().head.commit
        # add the submodule's changed commit to the index, which is what the
        # user would do
        # beforehand, update our instance's binsha with the new one
        nsm.binsha = nsm.module().head.commit.binsha
        rwrepo.index.add([nsm])
        # change branch
        #=================
        # we only have one branch, so we switch to a virtual one, and back
        # to the current one to trigger the difference
        cur_branch = nsm.branch
        nsmm = nsm.module()
        prev_commit = nsmm.head.commit
        for branch in ("some_virtual_branch", cur_branch.name):
            nsm.config_writer().set_value(Submodule.k_head_option, git.Head.to_full_path(branch))
            csmbranchchange = rwrepo.index.commit("changed branch to %s" % branch)
            nsm.set_parent_commit(csmbranchchange)
        # END for each branch to change
        # Lets remove our tracking branch to simulate some changes
        nsmmh = nsmm.head
        assert nsmmh.ref.tracking_branch() is None          # never set it up until now
        assert not nsmmh.is_detached
        #dry run does nothing
        rm.update(recursive=False, dry_run=True, progress=prog)
        assert nsmmh.ref.tracking_branch() is None
        # the real thing does
        rm.update(recursive=False, progress=prog)
        assert nsmmh.ref.tracking_branch() is not None
        assert not nsmmh.is_detached
        # recursive update
        # =================
        # finally we recursively update a module, just to run the code at least once
        # remove the module so that it has more work
        assert len(nsm.children()) >= 1         # could include smmap
        assert nsm.exists() and nsm.module_exists() and len(nsm.children()) >= 1
        # assure we pull locally only
        nsmc = nsm.children()[0]
        nsmc.config_writer().set_value('url', async_url)
        rm.update(recursive=True, progress=prog, dry_run=True)          # just to run the code
        rm.update(recursive=True, progress=prog)
        # gitdb: has either 1 or 2 submodules depending on the version
        assert len(nsm.children()) >= 1 and nsmc.module_exists()
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc. <http://www.redhat.com>
# This file is part of GlusterFS.
#
# This file is licensed to you under your choice of the GNU Lesser
# General Public License, version 3 or any later version (LGPLv3 or
# later), or the GNU General Public License, version 2 (GPLv2), in all
# cases as published by the Free Software Foundation.
#
from .utils import heal_execute, heal_execute_xml, GlusterCmdException
from .parsers import parse_heal_statistics, parse_heal_info
# Valid values for the optional info_type argument of info() below.
HEAL_INFO_TYPES = ["healed", "heal-failed", "split-brain"]
def enable(volname):
    """
    Enable Volume Heal

    :param volname: Volume Name
    :returns: Output of Enable command, raises
     GlusterCmdException((rc, out, err)) on error
    """
    return heal_execute([volname, "enable"])
def disable(volname):
    """
    Disable Volume Heal

    :param volname: Volume Name
    :returns: Output of Disable command, raises
     GlusterCmdException((rc, out, err)) on error
    """
    return heal_execute([volname, "disable"])
def full(volname):
    """
    Full Volume Heal

    :param volname: Volume Name
    :returns: Output of Full Heal command, raises
     GlusterCmdException((rc, out, err)) on error
    """
    return heal_execute([volname, "full"])
def statistics(volname):
    """
    Get Statistics of Heal

    :param volname: Volume Name
    :returns: Parsed heal statistics, raises
     GlusterCmdException((rc, out, err)) on error
    """
    xml_output = heal_execute_xml([volname, "statistics"])
    return parse_heal_statistics(xml_output)
def info(volname, info_type=None):
    """
    Get Volume Heal Info

    :param volname: Volume Name
    :param info_type: Optional filter; one of "healed", "heal-failed" or
        "split-brain" (case insensitive). None returns the unfiltered info.
    :returns: Output of Heal Info command, raises
     GlusterCmdException((rc, out, err)) on error
    """
    cmd = [volname, "info"]
    if info_type is not None:
        # Normalize once instead of calling .lower() twice.
        itype = info_type.lower()
        if itype not in HEAL_INFO_TYPES:
            raise GlusterCmdException((-1, "", "Invalid Heal Info Types"))
        cmd += [itype]
    return parse_heal_info(heal_execute_xml(cmd))
def split_brain(volname, bigger_file=None,
                latest_mtime=None, source_brick=None, path=None):
    """
    Split Brain Resolution

    :param volname: Volume Name
    :param bigger_file: File Path of Bigger file
    :param latest_mtime: File Path of Latest mtime
    :param source_brick: Source Brick for Good Copy
    :param path: Resolution of this path/file
    :returns: Output of Split-brain command, raises
     GlusterCmdException((rc, out, err)) on error
    """
    cmd = [volname, "split-brain"]
    # Append each resolution strategy flag that the caller supplied,
    # in the order the gluster CLI expects them.
    for flag, value in (("bigger-file", bigger_file),
                        ("latest-mtime", latest_mtime),
                        ("source-brick", source_brick)):
        if value is not None:
            cmd += [flag, value]
    if path is not None:
        cmd.append(path)
    return heal_execute(cmd)
| chawlanikhil24/glustercli-python | gluster/cli/heal.py | Python | gpl-2.0 | 2,950 |
# -*- coding: utf8 -*-
from django.db import models
class Person(models.Model):
    """ an actual singular human being """
    # name may be left blank; email is required by the default EmailField.
    name = models.CharField(blank=True, max_length=100)
    email = models.EmailField()

    def __unicode__(self):
        # NOTE(review): __unicode__ implies Python 2 / Django < 2.0 — confirm target.
        return self.name
class Group(models.Model):
    """ a music group """
    name = models.CharField(max_length=200, unique=True, help_text="Name of the group")
    # Membership is optional; the help text drives the ajax-select search widget.
    members = models.ManyToManyField(Person,
                                     blank=True,
                                     help_text="Enter text to search for and add each member of the group.")
    url = models.URLField(blank=True)

    def __unicode__(self):
        return self.name
class Label(models.Model):
    """ a record label """
    name = models.CharField(max_length=200, unique=True)
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 — confirm.
    owner = models.ForeignKey(Person, blank=True, null=True)
    url = models.URLField(blank=True)

    def __unicode__(self):
        return self.name
class Song(models.Model):
    """ a song """
    # A song always has a title and belongs to exactly one group.
    title = models.CharField(blank=False, max_length=200)
    group = models.ForeignKey(Group)

    def __unicode__(self):
        return self.title
class Release(models.Model):
    """ a music release/product """
    title = models.CharField(max_length=100)
    catalog = models.CharField(blank=True, max_length=100)
    # verbose_name deliberately uses non-ASCII text to exercise unicode handling.
    group = models.ForeignKey(Group, blank=True, null=True, verbose_name=u"Русский текст (group)")
    # label is mandatory, unlike the optional group above.
    label = models.ForeignKey(Label, blank=False, null=False)
    songs = models.ManyToManyField(Song, blank=True)

    def __unicode__(self):
        return self.title
class Author(models.Model):
    """ Author has multiple books,
        via foreign keys
    """
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.name
class Book(models.Model):
    """ Book has no admin, its an inline in the Author admin"""
    author = models.ForeignKey(Author)
    title = models.CharField(max_length=100)
    about_group = models.ForeignKey(Group)
    mentions_persons = models.ManyToManyField(Person, help_text="Person lookup renders html in menu")

    def __unicode__(self):
        return self.title
| jose-flores/django-ajax-selects | example/example/models.py | Python | gpl-3.0 | 2,142 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import mongoengine as me
from st2common.constants.secrets import MASKED_ATTRIBUTE_VALUE
from st2common.constants.types import ResourceType
from st2common.fields import ComplexDateTimeField
from st2common.models.db import stormbase
from st2common.services.rbac import get_roles_for_user
from st2common.util import date as date_utils
# Public API of this module: the three auth-related document classes.
__all__ = [
    'UserDB',
    'TokenDB',
    'ApiKeyDB'
]
class UserDB(stormbase.StormFoundationDB):
    """Mongo document representing a StackStorm user account."""
    # Unique login name; also the key used when resolving RBAC roles.
    name = me.StringField(required=True, unique=True)
    # True for machine/service accounts (e.g. ChatOps bots).
    is_service = me.BooleanField(required=True, default=False)
    nicknames = me.DictField(required=False,
                             help_text='"Nickname + origin" pairs for ChatOps auth')

    def get_roles(self):
        """
        Retrieve roles assigned to that user.

        :rtype: ``list`` of :class:`RoleDB`
        """
        result = get_roles_for_user(user_db=self)
        return result

    def get_permission_assignments(self):
        # TODO: not implemented yet; intentionally a no-op placeholder.
        pass
class TokenDB(stormbase.StormFoundationDB):
    """Mongo document for an issued authentication token."""
    # Name of the user the token was issued to.
    user = me.StringField(required=True)
    # The opaque token value itself; must be globally unique.
    token = me.StringField(required=True, unique=True)
    expiry = me.DateTimeField(required=True)
    metadata = me.DictField(required=False,
                            help_text='Arbitrary metadata associated with this token')
class ApiKeyDB(stormbase.StormFoundationDB, stormbase.UIDFieldMixin):
    """
    Mongo document for an API key. Only a one-way hash of the key is
    persisted; the raw key never reaches the database.
    """
    RESOURCE_TYPE = ResourceType.API_KEY
    # The UID is derived from the key hash (see UIDFieldMixin).
    UID_FIELDS = ['key_hash']

    user = me.StringField(required=True)
    key_hash = me.StringField(required=True, unique=True)
    metadata = me.DictField(required=False,
                            help_text='Arbitrary metadata associated with this token')
    created_at = ComplexDateTimeField(default=date_utils.get_datetime_utc_now,
                                      help_text='The creation time of this ApiKey.')
    enabled = me.BooleanField(required=True, default=True,
                              help_text='A flag indicating whether the ApiKey is enabled.')

    meta = {
        'indexes': [
            {'fields': ['user']},
            {'fields': ['key_hash']}
        ]
    }

    def __init__(self, *args, **values):
        super(ApiKeyDB, self).__init__(*args, **values)
        # UID must be recomputed after the fields are populated.
        self.uid = self.get_uid()

    def mask_secrets(self, value):
        # Return a copy with secret-bearing fields replaced; never mutate the input.
        result = copy.deepcopy(value)

        # In theory the key_hash is safe to return as it is one way. On the other
        # hand given that this is actually a secret no real point in letting the hash
        # escape. Since uid contains key_hash masking that as well.
        result['key_hash'] = MASKED_ATTRIBUTE_VALUE
        result['uid'] = MASKED_ATTRIBUTE_VALUE
        return result
# Registry of model classes picked up by the persistence setup code.
MODELS = [UserDB, TokenDB, ApiKeyDB]
| lakshmi-kannan/st2 | st2common/st2common/models/db/auth.py | Python | apache-2.0 | 3,508 |
from sqlalchemy import create_engine
from sqlalchemy import Column, ForeignKey, ColumnDefault
from sqlalchemy import Integer, String, Date, Boolean, Unicode, Float
from sqlalchemy.ext.declarative import declarative_base
from amir.database import get_declarative_base
# Shared declarative base so this table joins the application-wide metadata.
Base = get_declarative_base()
## \defgroup DataBase
## @{
#Version 0.2 tables
class CustGroups(Base):
    """Customer-group lookup table (version 0.2 schema)."""
    __tablename__ = "custGroups"

    custGrpId = Column( Integer, primary_key = True )
    custGrpCode = Column( String(50), nullable = False )
    custGrpName = Column( Unicode(50), nullable = False )
    custGrpDesc = Column( Unicode(200), nullable = True )

    def __init__( self, custGrpCode, custGrpName, custGrpDesc ):
        # The primary key (custGrpId) is left for the database to assign.
        self.custGrpCode = custGrpCode
        self.custGrpName = custGrpName
        self.custGrpDesc = custGrpDesc
## @}
| AhmadHamzeei/Amir-Accounting | amir/database/CustGroups.py | Python | gpl-3.0 | 856 |
import json
from functools import wraps
from django.http import HttpResponse
class SimpleAjaxResult(object):
    """
    A generic structure for returning API results intended for
    server-side rendered AJAX calls.
    """
    # Outcome codes shared with the client-side javascript.
    SUCCESS = 0
    FAILURE = 1

    def __init__(self, **kwargs):
        # Attribute order matters: __dict__ insertion order is what
        # serialize() emits.
        self.result = kwargs.get('result', SimpleAjaxResult.FAILURE)
        for optional in ('markup', 'messages', 'data'):
            setattr(self, optional, kwargs.get(optional))
        super(SimpleAjaxResult, self).__init__()

    def serialize(self):
        # The payload is a single-element JSON array, as the callers expect.
        payload = [self.__dict__]
        return json.dumps(payload)
def ajax_permission_required(permission):
    """
    Decorator mirroring Django's ``permission_required``: a denied request
    receives a serialized SimpleAjaxResult failure payload instead of a
    redirect.
    """
    def decorator(target_view):
        def wrapped_view(request, *args, **kwargs):
            user = request.user
            if user and user.is_authenticated() and user.has_perm(permission):
                return target_view(request, *args, **kwargs)
            failure = SimpleAjaxResult(
                result=SimpleAjaxResult.FAILURE,
                messages=['The user does not have proper permissions for this action.'])
            response = HttpResponse(content=failure.serialize(), content_type='application/json')
            # NOTE(review): 'INVALID-VALUE' is not a legal X-Frame-Options value
            # (DENY / SAMEORIGIN); preserved verbatim — confirm intent.
            response['X-Frame-Options'] = 'INVALID-VALUE'
            return response
        return wraps(target_view)(wrapped_view)
    return decorator
def ajax_login_required(target_view):
    """
    Decorator mirroring Django's ``login_required``: an anonymous request
    receives a serialized SimpleAjaxResult failure payload instead of a
    redirect.
    """
    def wrapped_view(request, *args, **kwargs):
        user = request.user
        if user and user.is_authenticated():
            return target_view(request, *args, **kwargs)
        failure = SimpleAjaxResult(
            result=SimpleAjaxResult.FAILURE,
            messages=['Login is required for this action.'])
        response = HttpResponse(content=failure.serialize(), content_type='application/json')
        # NOTE(review): 'INVALID-VALUE' is not a legal X-Frame-Options value
        # (DENY / SAMEORIGIN); preserved verbatim — confirm intent.
        response['X-Frame-Options'] = 'INVALID-VALUE'
        return response
    return wraps(target_view)(wrapped_view)
| jsquire/Portfolio | src/scripts/python/ajax_rpc.py | Python | mit | 2,209 |
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the 'female_kwi' creature spawn template with the core spawn service."""
    mobileTemplate = MobileTemplate()
    mobileTemplate.setCreatureName('female_kwi')
    mobileTemplate.setLevel(63)
    mobileTemplate.setDifficulty(Difficulty.NORMAL)
    # Spawn placement jitter, relative to the spawner position.
    mobileTemplate.setMinSpawnDistance(4)
    mobileTemplate.setMaxSpawnDistance(8)
    mobileTemplate.setDeathblow(False)
    mobileTemplate.setScale(1)
    # Harvestable resource types and yields.
    mobileTemplate.setMeatType("Herbivore Meat")
    mobileTemplate.setMeatAmount(100)
    mobileTemplate.setHideType("Leathery Hide")
    mobileTemplate.setHideAmount(86)
    mobileTemplate.setBoneType("Animal Bones")
    mobileTemplate.setBoneAmount(76)
    # Members of the 'kwi' social group within 12m will assist when attacked.
    mobileTemplate.setSocialGroup("kwi")
    mobileTemplate.setAssistRange(12)
    mobileTemplate.setStalker(False)
    mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
    templates = Vector()
    templates.add('object/mobile/shared_kwi.iff')
    mobileTemplate.setTemplates(templates)
    attacks = Vector()
    attacks.add('bm_bite_4')
    attacks.add('bm_hamstring_4')
    mobileTemplate.setDefaultAttack('creatureMeleeAttack')
    mobileTemplate.setAttacks(attacks)
    core.spawnService.addMobileTemplate('female_kwi', mobileTemplate)
# Copyright 2012-2013 John Sullivan
# Copyright 2012-2013 Other contributers as noted in the CONTRIBUTERS file
#
# This file is part of Galah.
#
# Galah is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Galah is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Galah. If not, see <http://www.gnu.org/licenses/>.
def tuplify(target):
    """Transform a single item into a one-element tuple; pass tuples through.

    Note: an exact ``type`` check is used deliberately, so tuple *subclass*
    instances are wrapped rather than returned as-is.
    """
    if type(target) is tuple:
        return target
    return (target, )
#!/usr/bin/python
import sys
import os
import pprint
import struct
# Prepend the parent directory so the in-tree Xlib package shadows any
# installed copy when this utility is run from the source checkout.
sys.path.insert(1, os.path.join(sys.path[0], '..'))
# Compatibility shim: replace the builtin buffer() with a plain slice so the
# protocol parsing code can operate on ordinary strings.
# NOTE(review): sys.maxint exists only on Python 2 — this file is Python 2 only.
def dummy_buffer(str, x, y = sys.maxint):
    # Return the slice [x:y]; y defaults to "through the end of the string".
    return str[x:y]
__builtins__.buffer = dummy_buffer
from Xlib.protocol import display, request, rq, event
from Xlib import error
# We don't want any fancy dictwrapper, just plain mappings, so parsed
# structures pretty-print as ordinary dicts.
rq.DictWrapper = lambda x: x
class BugFile:
    """Demultiplex an xbug capture into client and server byte streams.

    The capture is a sequence of records: a control line of the form
    ``C<count>`` or ``S<count>`` (direction plus decimal byte count),
    followed by exactly that many bytes of raw data.
    """

    def __init__(self, file):
        self.file = file
        # Buffered, not-yet-consumed data for each direction.
        self.cbuf = self.sbuf = ''

    def read_client(self, bytes):
        # Refill until enough client data is buffered or the file ends.
        while self.file and len(self.cbuf) < bytes:
            self.read_next()
        data, self.cbuf = self.cbuf[:bytes], self.cbuf[bytes:]
        return data

    def read_server(self, bytes):
        # Refill until enough server data is buffered or the file ends.
        while self.file and len(self.sbuf) < bytes:
            self.read_next()
        data, self.sbuf = self.sbuf[:bytes], self.sbuf[bytes:]
        return data

    def read_next(self):
        line = self.file.readline()
        if line == '':
            # End of capture: drop the handle so the refill loops stop.
            self.file = None
            return
        src = line[0]
        length = int(line[1:-1])
        payload = self.file.read(length)
        if src == 'C':
            self.cbuf += payload
        elif src == 'S':
            self.sbuf += payload
        else:
            raise ValueError('Bad control line: %s' % line)
class ParseString:
    """Lazily-growing string view backed by a read callback.

    Indexing or slicing pulls just enough data from ``datafunc`` to satisfy
    the request.  An open-ended slice ``s[i:]`` hands the callback off to a
    fresh ParseString and invalidates this one, so only one object ever owns
    the underlying stream position.
    NOTE(review): relies on ``sys.maxint`` and ``__getslice__`` — Python 2
    only; under Python 3 slicing would go through __getitem__ and fail.
    """
    def __init__(self, datafunc):
        self.get_data = datafunc
        self.data = ''
    def __getitem__(self, i):
        if i < 0:
            raise ValueError('bad string index: %d' % i)
        # Fetch only the bytes still missing to reach index i.
        if len(self.data) <= i:
            if not self.get_data:
                raise RuntimeError('attempt to allocate more data after returning a new ParseString')
            self.data = self.data + self.get_data(i - len(self.data) + 1)
        return self.data[i]
    def __getslice__(self, i, j):
        # s[i:] arrives as j == sys.maxint: transfer stream ownership.
        if j == sys.maxint:
            if self.get_data:
                ps = ParseString(self.get_data)
                self.get_data = None
                return ps
            else:
                raise RuntimeError('attempt to allocate another ParseString')
        if i < 0 or j < 0 or i > j:
            raise ValueError('bad slice indices: [%d:%d]' % (i, j))
        # Fetch only the bytes still missing to cover [i:j].
        if len(self.data) < j:
            if not self.get_data:
                raise RuntimeError('attempt to allocate more data after returning a new ParseString')
            self.data = self.data + self.get_data(j - len(self.data))
        return self.data[i:j]
class DummyDisplay:
    """Minimal stand-in for an Xlib display object during offline parsing."""

    def get_resource_class(self, name):
        # Offline decoding needs no real resource wrappers, so opt out for
        # every resource class name.
        return None
class ParseXbug:
    """Decode an xbug capture into a human-readable protocol log.

    Replays the client (request) and server (reply / event / error) byte
    streams in lockstep, using sequence numbers from the server stream to
    flush the corresponding client requests before printing each server item.
    NOTE(review): Python 2 only (``sys.maxint``, ``ord`` on string bytes).
    """
    def __init__(self, infile = sys.stdin, outfile = sys.stdout):
        bf = BugFile(infile)
        self.cdata = ParseString(bf.read_client)
        sdata = ParseString(bf.read_server)
        self.outfile = outfile
        self.xpprint = pprint.PrettyPrinter(indent = 2, stream = outfile)
        self.disp = DummyDisplay()

        # Parse client setup request
        r, self.cdata = display.ConnectionSetupRequest._request.parse_binary(self.cdata, self.disp)
        self.print_xbug('request', 'ConnectionSetup', r)

        # Parse server reply; additional_length counts trailing 4-byte units
        r, sdata = display.ConnectionSetupRequest._reply.parse_binary(sdata, self.disp)
        extra = r['additional_length'] * 4
        del r['additional_length']

        extradata = sdata[:extra]
        sdata = sdata[extra:]

        if r['status'] == 0:
            # Connection refused: the extra data is a textual reason string.
            r['reason'] = extradata[:r['reason_length']]
            del r['status']
            del r['reason_length']
            self.print_xbug('error', 'ConnectionSetup', r)
            return
        elif r['status'] == 1:
            # Connection accepted: merge the success body into the reply dict.
            r2, d = display.ConnectionSetupRequest._success_reply.parse_binary(extradata, self.disp)
            del r['status']
            del r['reason_length']
            r.update(r2)
            del r2
            self.print_xbug('reply', 'ConnectionSetup', r)
        else:
            raise ValueError('bad connection setup reply status: %d' % r['status'])

        self.last_serial = 0
        self.last_request = None
        while 1:
            # Get next server item, always at least 32 bytes
            d = sdata[:32]
            if len(d) != 32:
                # Server stream exhausted: print out remaining requests
                try:
                    self.get_requests(sys.maxint)
                except ValueError:
                    pass
                return
            sdata = sdata[32:]

            # First byte discriminates error (0) / reply (1) / event (>=2)
            t = ord(d[0])

            # Error
            if t == 0:
                # Code is second byte
                code = ord(d[1])
                # Fetch error class
                estruct = error.xerror_class.get(code, error.XError)
                r, d = estruct._fields.parse_binary(d, self.disp)
                del r['type']
                self.get_requests(r['sequence_number'])
                self.print_xbug('error', estruct.__name__, r)

            # Reply
            elif t == 1:
                # Get sequence number, and read corresponding request
                sno = struct.unpack('=H', d[2:4])[0]
                self.get_requests(sno)

                # Replies may carry extra data beyond the fixed 32 bytes.
                rlen = int(struct.unpack('=L', d[4:8])[0]) * 4
                d = d + sdata[:rlen]
                sdata = sdata[rlen:]
                if self.last_request:
                    r, d = self.last_request._reply.parse_binary(d, self.disp)
                    self.print_xbug('reply', self.last_request.__name__, r)
                else:
                    self.print_xbug('reply', 'Unknown',
                                    { 'sequence_number': sno })

            # Some event
            else:
                estruct = event.event_class.get(t, event.AnyEvent)
                r, d = estruct._fields.parse_binary(d, self.disp)
                self.get_requests(r['sequence_number'])
                self.print_xbug('event', estruct.__name__, r)

    def get_requests(self, serial):
        # Print every client request up to (and including) the given serial.
        while self.last_serial < serial:
            # Request length lives in bytes 2-3, counted in 4-byte units.
            d = self.cdata[2:4]
            if len(d) != 2:
                raise ValueError('client request missing')

            rlen = struct.unpack('=H', d)[0] * 4

            d = self.cdata[:rlen]
            if len(d) != rlen:
                raise ValueError('client request missing')

            self.cdata = self.cdata[rlen:]
            opcode = ord(d[0])
            self.last_request = request.major_codes.get(opcode)
            self.last_serial = self.last_serial + 1

            if self.last_request:
                r, d = self.last_request._request.parse_binary(d, self.disp)
                r['sequence_number'] = self.last_serial
                self.print_xbug('request', self.last_request.__name__, r)
            else:
                self.print_xbug('request', 'Unknown (%d)' % opcode,
                                { 'sequence_number': self.last_serial })

    def print_xbug(self, rtype, name, data):
        # One header line ("request:  Name") followed by a pretty-printed body.
        self.outfile.write('%-8s %s\n' % (rtype + ':', name))
        self.xpprint.pprint(data)
        self.outfile.write('\n')
# Command-line entry point: decode an xbug capture from stdin to stdout.
if __name__ == '__main__':
    ParseXbug()
| alexer/python-xlib | utils/parsexbug.py | Python | gpl-2.0 | 7,194 |
import sys
import unittest
from pyplink.tests import test_suite as pyplink_test_suite
# Run the packaged test suite and exit non-zero on failure so CI can detect it.
result = unittest.TextTestRunner().run(pyplink_test_suite)
sys.exit(0 if result.wasSuccessful() else 1)
| lemieuxl/pyplink | conda_recipe/run_test.py | Python | mit | 195 |
import json
from sdcclient._common import _SdcCommon
def policy_action_stop():
    """Describe a "stop" action: kill the offending container."""
    return dict(type="POLICY_ACTION_STOP")
def policy_action_capture(file_name, secs_before=5, secs_after=15, filter=""):
    """Describe a "capture" action: record a sysdig trace around the event.

    :param file_name: name of the capture file to produce
    :param secs_before: seconds of activity to include before the event
    :param secs_after: seconds of activity to include after the event
    :param filter: optional sysdig filter expression limiting the capture
    """
    ns_per_sec = 1_000_000_000
    # Keys are inserted in the same order the backend payload expects.
    action = {}
    action["afterEventNs"] = secs_after * ns_per_sec
    action["beforeEventNs"] = secs_before * ns_per_sec
    action["isLimitedToContainer"] = False
    action["type"] = "POLICY_ACTION_CAPTURE"
    action["filter"] = filter
    action["name"] = file_name
    action["bucketName"] = ""
    action["storageType"] = "S3"
    return action
def policy_action_pause():
    """Describe a "pause" action: suspend the offending container."""
    return dict(type="POLICY_ACTION_PAUSE")
def policy_action_kill():
    """Describe a "kill" action: terminate the offending process."""
    return dict(type="POLICY_ACTION_KILL")
class PolicyClientV2(_SdcCommon):
    """Client for the Sysdig Secure policies v2 REST API (``/api/v2/policies``)."""

    def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None):
        super(PolicyClientV2, self).__init__(token, sdc_url, ssl_verify, custom_headers)
        # Product identifier attached to requests; "SDS" == Sysdig Secure.
        self.product = "SDS"

    def create_default_policies(self):
        """
        Create new policies based on the currently available set of rules. For now, this only covers Falco rules, but we might extend
        the endpoint later. The backend should use the defaultPolicies property of a previously provided FalcoRulesFiles model as
        guidance on the set of policies to create. The backend should only create new policies (not delete or modify), and should only
        create new policies if there is not an existing policy with the same name.

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.create_default_policies()
        """
        res = self.http.post(self.url + '/api/v2/policies/default', headers=self.hdrs, verify=self.ssl_verify)
        return self._request_result(res)

    def delete_all_policies(self):
        """
        Delete all existing policies.

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.delete_all_policies()
        """
        ok, res = self.list_policies()
        if not ok:
            return False, res

        # Abort on the first failed deletion and surface that error.
        for policy in res:
            ok, res = self.delete_policy_id(policy["id"])
            if not ok:
                return False, res

        return True, "Policies Deleted"

    def list_policies(self):
        """
        List the current set of policies.

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.list_policies()
        """
        res = self.http.get(self.url + '/api/v2/policies', headers=self.hdrs, verify=self.ssl_verify)
        return self._request_result(res)

    def get_policy(self, name):
        """
        Find the policy with name <name> and return its json description.

        Args:
            name(str): The name of the policy to fetch

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the
            second the error or the JSON object containing the policy.

        Examples:
            >>> import json
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.get_policy(name="Terminal shell in container")
            >>> if ok:
            >>>     print((json.dumps(res, indent=2)))
        """
        ok, res = self.list_policies()
        if not ok:
            return [False, res]
        policies = res

        # Find the policy with the given name and return it.
        for policy in policies:
            if policy["name"] == name:
                return [True, policy]

        return [False, "No policy with name {}".format(name)]

    def get_policy_id(self, id):
        """
        Find the policy with id <id> and return its json description.

        Args:
            id(int): The id of the policy to fetch

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the
            second the error or the JSON object containing the policy.

        Examples:
            >>> import json
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.get_policy_id(id=123456)
            >>> if ok:
            >>>     print((json.dumps(res, indent=2)))
        """
        res = self.http.get(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify)
        return self._request_result(res)

    def add_policy(self, name, description, rule_names=[], actions=[], scope=None, severity=0, enabled=True,
                   notification_channels=[], type="falco"):
        """
        Adds a new policy.

        Args:
            name(str): A short name for the policy
            description(str): Description of policy
            rule_names(list): Array of rule names. (They must be names instead of ids, as the rules list view is by name, to account for multiple rules having the same name).
            actions(list): It can be a `policy_action_stop()`, `policy_action_pause()`, `policy_action_capture()` or `policy_action_kill()` action
            scope(str): Where the policy is being applied- Container, Host etc.. (example: "container.image.repository = sysdig/agent")
            severity(int): How severe is this policy when violated. Range from 0 to 7 included.
            enabled(bool): True if the policy should be considered
            notification_channels(list): ids of the notification channels to subscribe to the policy
            type(str): Type of the Policy. It can be one of: `falco`, `list_matching`, `k8s_audit`.

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2, policy_action_stop
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.add_policy(name="Terminal shell in container",
            ...                             description="A shell was spawned by a program in a container with an attached terminal.",
            ...                             rule_names=["Terminal shell in container"],
            ...                             actions=[policy_action_stop()],
            ...                             type="falco")
        """
        policy = {
            "name": name,
            "description": description,
            "ruleNames": rule_names,
            "actions": actions,
            "scope": scope,
            "severity": severity,
            "enabled": enabled,
            "notificationChannelIds": notification_channels,
            "type": type,
        }
        res = self.http.post(self.url + '/api/v2/policies', headers=self.hdrs, data=json.dumps(policy),
                             verify=self.ssl_verify)
        return self._request_result(res)

    def add_policy_json(self, policy_json):
        """
        Add a new policy using the provided json.

        Args:
            policy_json: a description of the new policy

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> import sys
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> policy_json = sys.stdin.read()
            >>> ok, res = client.add_policy_json(policy_json)
        """
        try:
            policy_obj = json.loads(policy_json)
            # "origin" is server-assigned and must not be posted back.
            if "origin" in policy_obj:
                del policy_obj["origin"]
        except Exception as e:
            return [False, "policy json is not valid json: {}".format(str(e))]

        res = self.http.post(self.url + '/api/v2/policies', headers=self.hdrs, data=json.dumps(policy_obj),
                             verify=self.ssl_verify)
        return self._request_result(res)

    def update_policy(self, id, name=None, description=None, rule_names=None, actions=None, scope=None,
                      severity=None, enabled=None, notification_channels=None):
        """
        Update policy with the provided values. Only the defined values will be updated.

        Args:
            id(int): The id of the policy to update
            name(str): A short name for the policy
            description(str): Description of policy
            rule_names(list): Array of rule names. (They must be names instead of ids, as the rules list view is by name, to account for multiple rules having the same name).
            actions(list): It can be a `policy_action_stop()`, `policy_action_pause()`, `policy_action_capture()` or `policy_action_kill()` action
            scope(str): Where the policy is being applied- Container, Host etc.. (example: "container.image.repository = sysdig/agent")
            severity(int): How severe is this policy when violated. Range from 0 to 7 included.
            enabled(bool): True if the policy should be considered
            notification_channels(list): ids of the notification channels to subscribe to the policy

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2, policy_action_stop
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.update_policy(name="Terminal shell in container",
            ...                                description="A shell was spawned by a program in a container with an attached terminal.",
            ...                                rule_names=["Terminal shell in container"],
            ...                                actions=[policy_action_stop()])
        """
        # Read-modify-write: fetch the current policy, patch only the
        # caller-supplied fields, then PUT the whole document back.
        ok, res = self.get_policy_id(id)
        if not ok:
            return [False, res]
        policy = res

        if name is not None:
            policy["name"] = name
        if description is not None:
            policy["description"] = description
        if rule_names is not None:
            policy["ruleNames"] = rule_names
        if actions is not None:
            policy["actions"] = actions
        if scope is not None:
            policy["scope"] = scope
        if severity is not None:
            policy["severity"] = severity
        if enabled is not None:
            policy["enabled"] = enabled
        if notification_channels is not None:
            policy["notificationChannelIds"] = notification_channels

        res = self.http.put(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, data=json.dumps(policy),
                            verify=self.ssl_verify)
        return self._request_result(res)

    def update_policy_json(self, policy_json):
        """
        Update an existing policy using the provided json. The 'id' field from the policy is
        used to determine which policy to update.

        Args:
            policy_json(str): A description of the new policy

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the result.

        Examples:
            >>> import sys
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> policy_json = sys.stdin.read()
            >>> ok, res = client.update_policy_json(policy_json)
        """
        try:
            policy_obj = json.loads(policy_json)
            # "origin" is server-assigned and must not be sent back.
            if "origin" in policy_obj:
                del policy_obj["origin"]
        except Exception as e:
            return [False, "policy json is not valid json: {}".format(str(e))]

        if "id" not in policy_obj:
            return [False, "Policy Json does not have an 'id' field"]

        res = self.http.put(self.url + '/api/v2/policies/{}'.format(policy_obj["id"]), headers=self.hdrs,
                            data=json.dumps(policy_obj), verify=self.ssl_verify)
        return self._request_result(res)

    def delete_policy_name(self, name):
        """
        Delete the policy with the given name.

        Args:
            name(str): The name of the policy to delete

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the JSON object representing the now-deleted policy.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.delete_policy_name(name="Terminal shell in container")
        """
        ok, res = self.list_policies()
        if not ok:
            return [False, res]

        # Find the policy with the given name and delete it
        for policy in res:
            if policy["name"] == name:
                return self.delete_policy_id(policy["id"])

        return [False, "No policy with name {}".format(name)]

    def delete_policy_id(self, id):
        """
        Delete the policy with the given id.

        Args:
            id(int): The id of the policy to delete

        Returns: A tuple (bool, res/err) where the first element indicates if the API call was successful and the second the error or the JSON object representing the now-deleted policy.

        Examples:
            >>> from sdcclient.secure import PolicyClientV2
            >>> client = PolicyClientV2(sdc_url="https://secure.sysdig.com", token=SECURE_TOKEN)
            >>> ok, res = client.delete_policy_id(id=123456)
        """
        res = self.http.delete(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify)
        return self._request_result(res)
| draios/python-sdc-client | sdcclient/secure/_policy_v2.py | Python | mit | 14,933 |
# extracted from Louie, http://pylouie.org/
#
# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher,
# Matthew R. Scott
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the <ORGANIZATION> nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Refactored 'safe reference from dispatcher.py"""
import weakref
import traceback
def safe_ref(target, on_delete=None):
    """Return a *safe* weak reference to a callable target.

    - ``target``: The object to be weakly referenced, if it's a bound
      method reference, will create a BoundMethodWeakref, otherwise
      creates a simple weakref.

    - ``on_delete``: If provided, will have a hard reference stored to
      the callable to be called after the safe reference goes out of
      scope with the reference object, (either a weakref or a
      BoundMethodWeakref) as argument.
    """
    # NOTE(review): im_self/im_func are the Python 2 bound-method attributes;
    # on Python 3 (__self__/__func__) this falls through to a plain weakref.
    if hasattr(target, 'im_self'):
        if target.im_self is not None:
            # Turn a bound method into a BoundMethodWeakref instance.
            # Keep track of these instances for lookup by disconnect().
            assert hasattr(target, 'im_func'), (
                "safe_ref target %r has im_self, but no im_func, "
                "don't know how to create reference"
                % target
                )
            reference = BoundMethodWeakref(target=target, on_delete=on_delete)
            return reference
    # Plain function/object: only attach the callback if it is callable,
    # since weakref.ref rejects non-callable second arguments.
    if callable(on_delete):
        return weakref.ref(target, on_delete)
    else:
        return weakref.ref(target)
class BoundMethodWeakref(object):
    """'Safe' and reusable weak references to instance methods.
    BoundMethodWeakref objects provide a mechanism for referencing a
    bound method without requiring that the method object itself
    (which is normally a transient object) is kept alive. Instead,
    the BoundMethodWeakref object keeps weak references to both the
    object and the function which together define the instance method.
    Attributes:
    - ``key``: The identity key for the reference, calculated by the
      class's calculate_key method applied to the target instance method.
    - ``deletion_methods``: Sequence of callable objects taking single
      argument, a reference to this object which will be called when
      *either* the target object or target function is garbage
      collected (i.e. when this object becomes invalid). These are
      specified as the on_delete parameters of safe_ref calls.
    - ``weak_self``: Weak reference to the target object.
    - ``weak_func``: Weak reference to the target function.
    Class Attributes:
    - ``_all_instances``: Class attribute pointing to all live
      BoundMethodWeakref objects indexed by the class's
      calculate_key(target) method applied to the target objects.
      This weak value dictionary is used to short-circuit creation so
      that multiple references to the same (object, function) pair
      produce the same BoundMethodWeakref instance.
    """
    # Registry of live references; WeakValueDictionary so entries vanish
    # automatically once the last strong reference to an instance is gone.
    _all_instances = weakref.WeakValueDictionary()
    def __new__(cls, target, on_delete=None, *arguments, **named):
        """Create new instance or return current instance.
        Basically this method of construction allows us to
        short-circuit creation of references to already- referenced
        instance methods. The key corresponding to the target is
        calculated, and if there is already an existing reference,
        that is returned, with its deletion_methods attribute updated.
        Otherwise the new instance is created and registered in the
        table of already-referenced methods.
        """
        key = cls.calculate_key(target)
        current = cls._all_instances.get(key)
        if current is not None:
            # Reuse the existing reference; just record the extra callback.
            current.deletion_methods.append(on_delete)
            return current
        else:
            base = super(BoundMethodWeakref, cls).__new__(cls)
            cls._all_instances[key] = base
            # __init__ is invoked explicitly here; Python will call it a
            # second time after __new__ returns, harmlessly re-running it.
            base.__init__(target, on_delete, *arguments, **named)
            return base
    def __init__(self, target, on_delete=None):
        """Return a weak-reference-like instance for a bound method.
        - ``target``: The instance-method target for the weak reference,
          must have im_self and im_func attributes and be
          reconstructable via the following, which is true of built-in
          instance methods::
            target.im_func.__get__( target.im_self )
        - ``on_delete``: Optional callback which will be called when
          this weak reference ceases to be valid (i.e. either the
          object or the function is garbage collected). Should take a
          single argument, which will be passed a pointer to this
          object.
        """
        # `self=self` binds the current instance at definition time so the
        # weakref callback does not itself keep a closure cell alive.
        def remove(weak, self=self):
            """Run the registered deletion methods and drop this reference
            from the registry once either referent has been collected."""
            # Snapshot then clear, so callbacks run at most once even if
            # both weak_self and weak_func die and trigger remove() twice.
            methods = self.deletion_methods[:]
            del self.deletion_methods[:]
            try:
                del self.__class__._all_instances[self.key]
            except KeyError:
                pass
            for function in methods:
                try:
                    if callable(function):
                        function(self)
                except Exception:
                    try:
                        traceback.print_exc()
                    except AttributeError, e:
                        # During interpreter shutdown traceback may already
                        # be torn down; fall back to a plain print.
                        print ('Exception during saferef %s '
                            'cleanup function %s: %s' % (self, function, e))
        self.deletion_methods = [on_delete]
        self.key = self.calculate_key(target)
        self.weak_self = weakref.ref(target.im_self, remove)
        self.weak_func = weakref.ref(target.im_func, remove)
        # Cached names for __str__, usable even after the referents die.
        self.self_name = str(target.im_self)
        self.func_name = str(target.im_func.__name__)
    def calculate_key(cls, target):
        """Calculate the reference key for this reference.
        Currently this is a two-tuple of the id()'s of the target
        object and the target function respectively.
        """
        return (id(target.im_self), id(target.im_func))
    calculate_key = classmethod(calculate_key)
    def __str__(self):
        """Give a friendly representation of the object."""
        return "%s(%s.%s)" % (
            self.__class__.__name__,
            self.self_name,
            self.func_name,
        )
    __repr__ = __str__
    def __nonzero__(self):
        """Whether we are still a valid reference."""
        return self() is not None
    def __cmp__(self, other):
        """Compare with another reference."""
        if not isinstance(other, self.__class__):
            return cmp(self.__class__, type(other))
        return cmp(self.key, other.key)
    def __call__(self):
        """Return a strong reference to the bound method.
        If the target cannot be retrieved, then will return None,
        otherwise returns a bound instance method for our object and
        function.
        Note: You may call this method any number of times, as it does
        not invalidate the reference.
        """
        target = self.weak_self()
        if target is not None:
            function = self.weak_func()
            if function is not None:
                # Re-bind the function to the instance on demand.
                return function.__get__(target)
        return None
| jek/flatland | flatland/util/_saferef.py | Python | mit | 8,766 |
from __future__ import division
import base64
import gc
import json
import os
import random
import sys
import time
import signal
import traceback
import urlparse
if '--iocp' in sys.argv:
from twisted.internet import iocpreactor
iocpreactor.install()
from twisted.internet import defer, reactor, protocol, tcp
from twisted.web import server
from twisted.python import log
from nattraverso import portmapper, ipdiscover
import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
from bitcoin import stratum, worker_interface, helper
from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
from . import networks, web, work
import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    """Start the p2pool node: connect to bitcoind (P2P and JSON-RPC),
    load and track shares, join the p2pool network, and bring up the
    worker/web/stratum interfaces. Runs as a Twisted inlineCallbacks
    coroutine; any unhandled error stops the reactor."""
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print ''' ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print ' ...success!'
            print
            defer.returnValue(factory)
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)
        bitcoind_getinfo_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)
        print ' ...success!'
        print ' Current block hash: %x' % (temp_work['previous_block'],)
        print ' Current block height: %i' % (temp_work['height'] - 1,)
        print
        if not args.testnet:
            factory = yield connect_p2p()
        # resolve the miner payout address: cached file -> bitcoind wallet,
        # unless one was supplied explicitly on the command line
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print ' Loaded cached address: %s...' % (address,)
            else:
                address = None
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print ' Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            if address is None:
                print ' Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
                with open(address_path, 'wb') as f:
                    f.write(address)
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print " %i" % (len(shares),)
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print
        print 'Initializing work...'
        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()
        # drop on-disk shares the tracker rejected, and keep the store in
        # sync with the tracker from now on
        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        def save_shares():
            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        deferral.RobustLoopingCall(save_shares).start(60)
        print ' ...success!'
        print
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        @defer.inlineCallbacks
        def parse(host):
            port = net.P2P_PORT
            if ':' in host:
                host, port_str = host.split(':')
                port = int(port_str)
            defer.returnValue(((yield reactor.resolve(host)), port))
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        node.p2p_node = p2pool_node.P2PNode(node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
            advertise_ip=args.advertise_ip,
            external_ip=args.p2pool_external_ip,
        )
        node.p2p_node.start()
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))
        deferral.RobustLoopingCall(save_addrs).start(60)
        print ' ...success!'
        print
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        # start listening for workers with a JSON-RPC server
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('static/'))
        web_serverfactory = server.Site(web_root)
        # stratum connections open with '{'; everything else is served as HTTP
        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
        with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
            pass
        print ' ...success!'
        print
        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print
        # crash watchdog: dump a stack trace if the reactor thread stalls
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = node.tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def joined(self, channel):
                    self.in_channel = True
                def left(self, channel):
                    self.in_channel = False
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))
        # periodic console status report; reprinted when changed or every 15s
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
                    )
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                        )
                    for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
                        print >>sys.stderr, '#'*40
                        print >>sys.stderr, '>>> Warning: ' + warning
                        print >>sys.stderr, '#'*40
                    if gc.garbage:
                        print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
def run():
    """Entry point: build the argument parser, resolve configuration
    (command line, then bitcoin.conf fallbacks, then network defaults),
    set up logging and crash reporting, and start the Twisted reactor
    with main() scheduled."""
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 1.0)',
        type=float, action='store', default=1.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
        help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
        type=str, action='store', default=None, dest='p2pool_external_ip')
    parser.add_argument('--disable-advertise',
        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
        action='store_false', default=True, dest='advertise_ip')
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH',
        help='custom configuration file path (when bitcoind -conf option used)',
        type=str, action='store', default=None, dest='bitcoind_config_path')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='bitcoind_rpc_ssl')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    args = parser.parse_args()
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    # left-pad with Nones so a single positional becomes the password
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
    # fill in any unset credentials/ports from bitcoin.conf
    if args.bitcoind_rpc_password is None:
        conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if 'rpcssl' in contents and contents['rpcssl'] != '0':
            args.bitcoind_rpc_ssl = True
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''
    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT
    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    # split credentials out of merged-mining URLs: url -> (bare_url, userpass)
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    # redirect stdout/stderr through the timestamping log pipeline
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    deferral.RobustLoopingCall(logfile.reopen).start(5)
    # best-effort crash reporter: posts error tracebacks to the author's
    # server, rate-limited to one report per 5 seconds
    class ErrorReporter(object):
        def __init__(self):
            self.last_sent = None
        def emit(self, eventDict):
            if not eventDict["isError"]:
                return
            if self.last_sent is not None and time.time() < self.last_sent + 5:
                return
            self.last_sent = time.time()
            if 'failure' in eventDict:
                text = ((eventDict.get('why') or 'Unhandled Error')
                    + '\n' + eventDict['failure'].getTraceback())
            else:
                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
            from twisted.web import client
            client.getPage(
                url='http://u.forre.st/p2pool_error.cgi',
                method='POST',
                postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
                timeout=15,
            ).addBoth(lambda x: None)
    if not args.no_bugreport:
        log.addObserver(ErrorReporter().emit)
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()
| FreicoinAlliance/p2pool | p2pool/main.py | Python | gpl-3.0 | 30,436 |
#encoding: utf-8
import curses
import curses.panel
import struct
import sys
import termios
import weakref
#For more complex method of getting the size of screen
try:
import fcntl
except ImportError:
# Win32 platforms do not have fcntl
pass
from .container import Container
class Workspace(Container):
    """Top-level curses container backed by a scrollable curses pad.

    A Workspace is its own root: it passes itself as both the ``form`` and
    ``parent`` arguments down the Container/Widget constructor chain, so
    inherited code that consults ``self.form``/``self.parent`` (stored as
    weakrefs by Widget) resolves back to this object.
    """

    def __init__(self,
                 name=None,
                 parent_app=None,
                 min_height=24,
                 min_width=80,
                 max_height=None,
                 max_width=None,
                 *args,
                 **kwargs):
        """Create the workspace and its backing pad.

        - ``name``: optional identifier for the workspace.
        - ``parent_app``: owning application; held as a weakref proxy so
          the workspace does not keep the application alive.
        - ``min_height``/``min_width``: lower bounds for the pad size.
        - ``max_height``/``max_width``: upper bounds; when None they track
          the physical screen size (re-measured on every ``_resize``).
        """
        # Remember which dimensions should follow the physical screen.
        if max_height is None:
            self.auto_max_height = True
            max_height = self.max_physical()[0]
        else:
            self.auto_max_height = False
        if max_width is None:
            self.auto_max_width = True
            max_width = self.max_physical()[1]
        else:
            self.auto_max_width = False
        # Widgets set self.form and self.parent as weakrefs of their first
        # instantiation arguments. Since Forms inherit from Widgets, passing
        # self twice makes Workspace.form and Workspace.parent weakrefs to
        # self, so inherited Widget/Container methods still work.
        super(Workspace, self).__init__(self,  # self.form -> self
                                        self,  # self.parent -> self
                                        max_height=max_height,
                                        max_width=max_width,
                                        *args,
                                        **kwargs)
        self.name = name
        # Bug fix: weakref.proxy(None) raises TypeError, so only proxy a
        # real application object (parent_app defaults to None).
        self.parent_app = weakref.proxy(parent_app) if parent_app is not None else None
        self.min_height = min_height
        self.min_width = min_width
        # Scroll origin within the pad and display origin on the screen.
        self.show_from_y = 0
        self.show_from_x = 0
        self.show_aty = 0
        self.show_atx = 0
        self.create_pad()

    def create_pad(self):
        """(Re)create the curses pad sized from current max/min dimensions.

        A safety margin of 1 row/column is added: writing to the bottom
        right corner of a pad raises an error because scrolling is not set.
        """
        pad_height = self.max_height + 1
        pad_width = self.max_width + 1
        if self.min_height > self.max_height:
            pad_height = self.min_height
        if self.min_width > self.max_width:
            pad_width = self.min_width
        self.pad_height = pad_height
        self.pad_width = pad_width
        self.curses_pad = curses.newpad(pad_height, pad_width)

    def max_physical(self):
        """Return (height, width) of the physical screen.

        Prefers the TIOCGWINSZ ioctl (curses.newwin misreports the screen
        size on OS X); falls back to asking curses via a throwaway window
        when the ioctl is unavailable (NameError on Win32, where fcntl does
        not exist) or reports a zero size.
        """
        try:
            max_y, max_x = struct.unpack('hh',
                                         fcntl.ioctl(sys.stderr.fileno(),
                                                     termios.TIOCGWINSZ,
                                                     'xxxx'))
            if (max_y, max_x) == (0, 0):
                raise ValueError
        except (ValueError, NameError):
            max_y, max_x = curses.newwin(0, 0).getmaxyx()
        return (max_y, max_x)

    def resize(self):
        """Hook for subclasses to adjust layout after a resize; no-op here."""
        pass

    def _resize(self, inpt=None):
        """Re-measure the screen, rebuild the pad, and cascade the resize
        to every contained widget, then redraw."""
        # Arranged to ensure at most one call to max_physical().
        if self.auto_max_height and self.auto_max_width:
            self.max_height, self.max_width = self.max_physical()
        elif self.auto_max_height:
            self.max_height = self.max_physical()[0]
        elif self.auto_max_width:
            self.max_width = self.max_physical()[1]
        self.height = self.max_height
        self.width = self.max_width
        self.create_pad()
        self.resize()
        for containee in self.contained:
            containee._resize()
        self.DISPLAY()

    async def main(self):
        """Application entry-point coroutine; subclasses override.

        Declared ``async def`` instead of the original
        ``@asyncio.coroutine`` decorator: ``asyncio`` was never imported
        (NameError at class-definition time) and the decorator itself was
        removed in Python 3.11.
        """
        pass
#
# Copyright 2016 University of Oxford
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Sans IO protocol handling code to the GA4GH streaming API.
"""
from __future__ import division
from __future__ import print_function
import base64
import json
import logging
import time
from six.moves.urllib.parse import urlencode
from six.moves.urllib.parse import urlunparse
from six.moves.urllib.parse import urlparse
from six.moves.urllib.parse import parse_qs
import htsget.exceptions as exceptions
TICKET_ROOT_KEY = "htsget"
def ticket_request_url(
        url, fmt=None, reference_name=None, reference_md5=None,
        start=None, end=None, fields=None, tags=None, notags=None,
        data_format=None):
    """Build a htsget ticket request URL from ``url`` plus query filters.

    Any query string already present on ``url`` is preserved.  The
    ``fmt``, ``fields``, ``tags`` and ``notags`` arguments are accepted
    for interface compatibility but are not currently forwarded.
    """
    parts = urlparse(url)
    query = parse_qs(parts.query)
    # TODO error checking
    for key, value in (("referenceName", reference_name),
                       ("referenceMD5", reference_md5)):
        if value is not None:
            query[key] = value
    if start is not None:
        query["start"] = int(start)
    if end is not None:
        query["end"] = int(end)
    if data_format is not None:
        query["format"] = data_format.upper()
    rebuilt = list(parts)
    rebuilt[4] = urlencode(query, doseq=True)
    return urlunparse(rebuilt)
def parse_ticket(json_text):
    """Decode a ticket response body.

    Returns the dictionary stored under the root 'htsget' element.
    Raises InvalidJsonError when the text is not valid JSON and
    MalformedJsonError when the root element is absent.
    """
    try:
        document = json.loads(json_text)
    except ValueError as err:
        raise exceptions.InvalidJsonError(err)
    if TICKET_ROOT_KEY not in document:
        raise exceptions.MalformedJsonError()
    return document[TICKET_ROOT_KEY]
class DownloadManager(object):
    """
    Abstract implementation of the protocol.

    Subclasses supply the transport layer by implementing
    _ticket_request() and _handle_http_url().
    """
    def __init__(
            self, url, output, data_format=None, reference_name=None,
            reference_md5=None, start=None, end=None, fields=None, tags=None,
            notags=None, max_retries=5, timeout=10, retry_wait=5, bearer_token=None):
        self.max_retries = max_retries
        self.timeout = timeout
        self.retry_wait = retry_wait
        self.bearer_token = bearer_token
        self.output = output
        self.ticket_request_url = ticket_request_url(
            url, data_format=data_format, reference_name=reference_name,
            reference_md5=reference_md5, start=start, end=end, fields=fields,
            tags=tags, notags=notags)
        self.ticket = None
        # BUG FIX: this previously read ``self.data_format = format``,
        # storing the *builtin* format() function instead of the argument.
        self.data_format = data_format
        self.md5 = None

    def __retry(self, method, *args):
        """Call ``method(*args)``, retrying RetryableError up to max_retries.

        Before each retry the output stream is rewound to its position at
        entry (so partial writes are overwritten) and we sleep retry_wait
        seconds.  If the stream does not support tell() (e.g. stdout),
        retrying is disabled and the error propagates immediately.
        """
        completed = False
        num_retries = 0
        position_before = None
        try:
            # stdout does not support seek/tell, so we disable retry if this fails
            position_before = self.output.tell()
        except IOError:
            pass
        while not completed:
            try:
                method(*args)
                completed = True
            except exceptions.RetryableError as re:
                if position_before is not None and num_retries < self.max_retries:
                    num_retries += 1
                    sleep_time = self.retry_wait  # TODO exponential backoff
                    logging.warning(
                        "Error: '{}' occured; sleeping {}s before retrying "
                        "(attempt={})".format(re, sleep_time, num_retries))
                    self.output.seek(position_before)
                    time.sleep(sleep_time)
                else:
                    raise re

    def _ticket_request(self):
        """Transport hook: fetch and return the ticket response body."""
        raise NotImplementedError()

    def _handle_ticket_request(self):
        # BUG FIX: run() invokes this method but it was not defined anywhere
        # in the class; fetch the ticket via the transport hook and parse it
        # so ``self.ticket`` is populated before run() reads it.
        self.ticket = parse_ticket(self._ticket_request())

    def _handle_data_uri(self, parsed_url):
        """Decode an inline base64 ``data:`` URI into the output stream."""
        split = parsed_url.path.split(",", 1)
        # TODO parse out the encoding properly.
        description = split[0]
        data = base64.b64decode(split[1])
        logging.debug("handle_data_uri({}, length={})".format(description, len(data)))
        self.output.write(data)

    def _handle_http_url(self, url, headers):
        """Transport hook: stream the payload at ``url`` into self.output."""
        # BUG FIX: the signature previously omitted ``self``, so the bound
        # call self._handle_http_url(url, headers) raised TypeError.
        raise NotImplementedError()

    def run(self):
        """Fetch the ticket, then download every chunk it lists, in order."""
        self.__retry(self._handle_ticket_request)
        self.data_format = self.ticket.get("format", "BAM")
        self.md5 = self.ticket.get("md5", None)
        for url_object in self.ticket["urls"]:
            url = urlparse(url_object["url"])
            if url.scheme.startswith("http"):
                headers = url_object.get("headers", "")
                self.__retry(self._handle_http_url, urlunparse(url), headers)
            elif url.scheme == "data":
                self._handle_data_uri(url)
            else:
                raise ValueError("Unsupported URL scheme:{}".format(url.scheme))
| jeromekelleher/htsget | htsget/protocol.py | Python | apache-2.0 | 5,386 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the nullable ``Item`` foreign key linking each movement-history
    # row back to the inventory item it concerns (reverse accessor:
    # ``ItemHistory``).

    dependencies = [
        ('Inventory', '0008_itemvendormodel_movementhistorymodel'),
    ]

    operations = [
        migrations.AddField(
            model_name='movementhistorymodel',
            name='Item',
            field=models.ForeignKey(related_name='ItemHistory', default=None, blank=True, to='Inventory.ItemModel', null=True),
        ),
    ]
| alexharmenta/Inventationery | Inventationery/apps/Inventory/migrations/0009_movementhistorymodel_item.py | Python | bsd-3-clause | 509 |
#!/Python27/python
import cgi, cgitb
import html  # stdlib escaping for untrusted query-string values

# Parse the submitted form / query-string data.
form = cgi.FieldStorage()

# Fall back to a placeholder when the radio button was not submitted.
if form.getvalue('subject'):
    subject = form.getvalue('subject')
else:
    subject = "Not set"

print("Content-type:text/html\r\n\r\n")
print("<html>")
print("<head>")
print("<title>Radio for CGI Program</title>")
print("</head>")
print("<body>")
# SECURITY FIX: ``subject`` comes straight from the request (untrusted
# input); escape it so crafted values cannot inject HTML/JavaScript (XSS).
print("<h2> Selected Subject is %s</h2>" % html.escape(subject))
print("</body>")
print("</html>")
# Pyomniar
# Copyright 2011 Chris Kelly
# See LICENSE for details.
import httplib
import urllib
import time
import re
from pyomniar.error import OmniarError
from pyomniar.utils import convert_to_utf8_str
re_path_template = re.compile('{\w+}')
def bind_api(**config):
    """Factory producing an API method from declarative ``config``.

    Recognised config keys:
        path          -- URL path template; may contain ``{name}`` variables
        allowed_param -- ordered names for positional call arguments
        method        -- HTTP verb (default ``GET``)

    Returns a callable ``_call(api, *args, **kargs)`` that builds and
    executes a single request against ``api``.
    """
    class APIMethod(object):

        path = config['path']
        allowed_param = config.get('allowed_param', [])
        method = config.get('method', 'GET')

        def __init__(self, api, args, kargs):
            self.api = api
            # Per-call keyword overrides fall back to the API object's defaults.
            self.parser = kargs.pop('parser', self.api.parser)
            self.post_data = kargs.pop('post_data', None)
            self.retry_count = kargs.pop('retry_count', api.retry_count)
            self.retry_delay = kargs.pop('retry_delay', api.retry_delay)
            self.retry_errors = kargs.pop('retry_errors', api.retry_errors)
            self.headers = kargs.pop('headers', {})
            self.build_parameters(args, kargs)
            self.api_root = api.api_root
            # Perform any path variable substitution
            self.build_path()
            if api.secure:
                self.scheme = 'https://'
            else:
                self.scheme = 'http://'
            self.host = api.host

        def build_parameters(self, args, kargs):
            """Map positional args onto allowed_param names, merge kwargs.

            ``None`` values are skipped; duplicate or excess parameters
            raise OmniarError.
            """
            self.parameters = {}
            for idx, arg in enumerate(args):
                if arg is None:
                    continue
                try:
                    self.parameters[self.allowed_param[idx]] = convert_to_utf8_str(arg)
                except IndexError:
                    raise OmniarError('Too many parameters supplied!')
            for k, arg in kargs.items():
                if arg is None:
                    continue
                if k in self.parameters:
                    raise OmniarError('Multiple values for parameter %s supplied!' % k)
                self.parameters[k] = convert_to_utf8_str(arg)

        def build_path(self):
            """Substitute ``{name}`` template variables in the request path.

            Parameters consumed by the path are removed from the query-string
            parameter dict so they are not sent twice.
            """
            for variable in re_path_template.findall(self.path):
                name = variable.strip('{}')
                if name == 'account_key' and 'account_key' not in self.parameters and self.api.auth:
                    # No 'user' parameter provided, fetch it from Auth instead.
                    value = self.api.auth.get_account_key()
                else:
                    try:
                        value = urllib.quote(self.parameters[name])
                    except KeyError:
                        raise OmniarError('No parameter value found for path variable: %s' % name)
                    del self.parameters[name]
                self.path = self.path.replace(variable, value)

        def execute(self):
            """Send the request, retrying as configured, and parse the response."""
            # Build the request URL
            url = self.api_root + self.path
            if self.api.auth:
                self.api.auth.append_auth(
                    self.scheme + self.host + url,
                    self.method, self.headers, self.parameters
                )
            if len(self.parameters):
                url = '%s?%s' % (url, urllib.urlencode(self.parameters))

            # Continue attempting request until successful
            # or maximum number of retries is reached.
            retries_performed = 0
            while retries_performed < self.retry_count + 1:
                # Open connection
                # FIXME: add timeout
                if self.api.secure:
                    conn = httplib.HTTPSConnection(self.host)
                else:
                    conn = httplib.HTTPConnection(self.host)

                # Apply authentication
                if self.api.auth:
                    self.api.auth.apply_auth(
                        self.scheme + self.host + url,
                        self.method, self.headers, self.parameters
                    )

                # Execute request
                try:
                    conn.request(self.method, url, headers=self.headers, body=self.post_data)
                    resp = conn.getresponse()
                except Exception, e:
                    raise OmniarError('Failed to send request: %s' % e)

                # Exit request loop if non-retry error code
                if self.retry_errors:
                    if resp.status not in self.retry_errors: break
                else:
                    if resp.status == 200: break

                # Sleep before retrying request again
                time.sleep(self.retry_delay)
                retries_performed += 1

            # If an error was returned, throw an exception
            self.api.last_response = resp
            if resp.status != 200:
                try:
                    error_msg = self.parser.parse_error(resp.read())
                except Exception:
                    error_msg = "Omniar error response: status code = %s" % resp.status
                raise OmniarError(error_msg, resp)

            # Parse the response payload
            result = self.parser.parse(self, resp.read())

            conn.close()
            return result

    def _call(api, *args, **kargs):
        # One APIMethod instance per invocation keeps per-call state isolated.
        method = APIMethod(api, args, kargs)
        return method.execute()

    return _call
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.apps.tests.view_helpers import AppViewsHelper
from go.base.tests.helpers import GoDjangoTestCase
class TestStaticReplyViews(GoDjangoTestCase):

    def setUp(self):
        self.app_helper = self.add_helper(AppViewsHelper(u'static_reply'))
        self.client = self.app_helper.get_client()

    def test_show_stopped(self):
        """The show page of a stopped conversation renders its name."""
        helper = self.app_helper.create_conversation_helper(name=u"myconv")
        response = self.client.get(helper.get_view_url('show'))
        self.assertContains(response, u"<h1>myconv</h1>")

    def test_show_running(self):
        """The show page of a running conversation renders its name."""
        helper = self.app_helper.create_conversation_helper(
            name=u"myconv", started=True)
        response = self.client.get(helper.get_view_url('show'))
        self.assertContains(response, u"<h1>myconv</h1>")

    def test_get_edit_empty_config(self):
        """The edit page loads when no config has been saved yet."""
        helper = self.app_helper.create_conversation_helper()
        response = self.client.get(helper.get_view_url('edit'))
        self.assertEqual(response.status_code, 200)

    def test_get_edit_small_config(self):
        """The edit page pre-populates the saved reply text."""
        helper = self.app_helper.create_conversation_helper(
            {'reply_text': 'hello'})
        response = self.client.get(helper.get_view_url('edit'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'hello')

    def test_edit_config(self):
        """Posting the edit form persists the config and redirects to show."""
        helper = self.app_helper.create_conversation_helper()
        conversation = helper.get_conversation()
        self.assertEqual(conversation.config, {})
        response = self.client.post(helper.get_view_url('edit'), {
            'reply_text': 'hello',
        })
        self.assertRedirects(response, helper.get_view_url('show'))
        conversation = helper.get_conversation()
        self.assertEqual(conversation.config, {'reply_text': 'hello'})
| praekelt/vumi-go | go/apps/static_reply/tests/test_views.py | Python | bsd-3-clause | 2,084 |
#!/usr/bin/env python
# Copyright (c) 2018 Piero Dalle Pezze
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
| pdp10/sbpipe | sbpipe/pl/create/__init__.py | Python | mit | 1,119 |
#!/usr/bin/env python
## if you wish to work with tensorflow v1 then ask it to emulate version 2 behavior
#import tensorflow.compat.v2 as tf
#tf.enable_v2_behavior()
#print(tf.__version__)
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import csv
import joblib
import time
from collections import Counter
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# TensorFlow and tf.keras
import tensorflow as tf
from tensorflow import keras
## check hardware availability
from tensorflow.python.client import device_lib
print(device_lib.list_local_devices())
def get_data():
    """Load Fashion-MNIST and return (X_train, y_train, X_test, y_test).

    Pixel values are scaled from [0, 255] to [0.0, 1.0] as float32.
    """
    # Class indices 0-9 map to: T-shirt/top, Trouser, Pullover, Dress, Coat,
    # Sandal, Shirt, Sneaker, Bag, Ankle boot.  (The old ``class_names``
    # local was never used, so it is recorded here as documentation only.)
    fashion_mnist = keras.datasets.fashion_mnist
    (train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
    X_train = train_images.astype('float32') / 255
    X_test = test_images.astype('float32') / 255
    return (X_train, train_labels, X_test, test_labels)
def summarize_data(X_train, y_train, X_test, y_test):
    """Print shape, class list, per-class counts and balance of the data."""
    divider = "-------------------------------------------"
    print(divider)
    print("X_train: {}".format(X_train.shape))
    counts = sorted(Counter(y_train).items())
    labels = [label for label, _ in counts]
    samples = [n for _, n in counts]
    print("num classes: {}, classes: {}".format(len(counts), labels))
    print("class samples: {}".format(samples))
    print("class balance: {}".format([round(n / X_train.shape[0], 2) for n in samples]))
    print(X_train.shape)
    print(divider)
def build_mlp(activation_fn='relu'):
    """Build a one-hidden-layer softmax classifier for 28x28 inputs."""
    return keras.Sequential([
        keras.layers.Flatten(input_shape=(28, 28)),
        keras.layers.Dense(128, activation=activation_fn),
        keras.layers.Dense(10, activation='softmax'),
    ])
def build_cnn(activation_fn='relu', dropout=None, num_classes=None):
    """
    Create a simple CNN for 28x28x1 inputs.

    dropout: optional two-element list of booleans enabling a Dropout(0.3)
        layer after the first and second pooling layers respectively.
    num_classes: size of the softmax output layer.  Defaults to the number
        of distinct labels in the module-level ``y_train`` array, matching
        the original (global-reading) behavior.
    """
    if num_classes is None:
        # Backwards compatible: previously this silently depended on the
        # module-level y_train global.
        num_classes = np.unique(y_train).size
    if not dropout:
        dropout = [False, False]
    model = keras.Sequential()
    model.add(keras.layers.Conv2D(28, (3, 3), activation=activation_fn, input_shape=(28, 28, 1)))
    model.add(keras.layers.MaxPooling2D((2, 2)))
    if dropout[0]:
        model.add(tf.keras.layers.Dropout(0.3))
    model.add(keras.layers.Conv2D(64, (3, 3), activation=activation_fn))
    model.add(keras.layers.MaxPooling2D((2, 2)))
    # BUG FIX: the second dropout slot previously re-checked dropout[0],
    # so dropout[1] could never enable this layer.
    if dropout[1]:
        model.add(tf.keras.layers.Dropout(0.3))
    model.add(keras.layers.Conv2D(64, (3, 3), activation=activation_fn))
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(64, activation=activation_fn))
    model.add(keras.layers.Dense(num_classes, activation='softmax'))
    return model
def train_network(model_name, model, loss_fn,
                  X_train=None, y_train=None, X_test=None, y_test=None,
                  optimizer='adam'):
    """
    Compile, train, evaluate and persist a model.

    The fitted model is saved to saved/<model_name>.h5 together with a .log
    file recording the loss function, optimizer and test metrics.  If the
    .h5 file already exists, the saved model is loaded and returned instead
    of retraining.

    BUG FIX: the call sites in this script pass the data arrays explicitly,
    but the old signature did not accept them (TypeError).  The data
    parameters default to None and fall back to the module-level arrays,
    preserving the original global-based behavior for old callers.
    """
    if X_train is None or y_train is None or X_test is None or y_test is None:
        _g = globals()
        X_train = _g['X_train'] if X_train is None else X_train
        y_train = _g['y_train'] if y_train is None else y_train
        X_test = _g['X_test'] if X_test is None else X_test
        y_test = _g['y_test'] if y_test is None else y_test

    ## save model and logfile
    save_dir = 'saved'
    if not os.path.isdir(save_dir):
        os.mkdir(save_dir)
    saved_model = os.path.join(save_dir, "{}.h5".format(model_name))

    if not os.path.exists(saved_model):
        ## compile and fit model
        model.compile(optimizer=optimizer,
                      loss=loss_fn,
                      metrics=['accuracy'])
        model.fit(X_train,
                  y_train,
                  batch_size=64,
                  epochs=10,
                  validation_data=(X_test, y_test))
        model.save(saved_model)

        ## evaluate model
        test_loss, test_acc = model.evaluate(X_test, y_test, verbose=2)

        ## save a log file alongside the model
        log_file = os.path.join(save_dir, "{}.log".format(model_name))
        with open(log_file, 'w') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerow(["loss_function", loss_fn])
            writer.writerow(["optimizer", optimizer])
            writer.writerow(["test_loss", test_loss])
            writer.writerow(["test_acc", test_acc])
    else:
        print("... loading saved model")
        model = keras.models.load_model(saved_model)
    return (model)
if __name__ == "__main__":

    ## get data
    X_train, y_train, X_test, y_test = get_data()
    summarize_data(X_train, y_train, X_test, y_test)

    ## build and train an MLP baseline
    model_mlp = build_mlp(activation_fn='relu')
    model_mlp = train_network("simple_mlp", model_mlp,
                              "sparse_categorical_crossentropy",
                              X_train,
                              y_train,
                              X_test,
                              y_test,
                              optimizer='adam')

    ## add the channel dimension expected by Conv2D: (N, 28, 28) -> (N, 28, 28, 1)
    X_train_1 = np.expand_dims(X_train, -1)
    X_test_1 = np.expand_dims(X_test, -1)

    model_cnn = build_cnn(activation_fn='relu')
    ## BUG FIX: the labels are integer class ids, so the CNN must use the
    ## sparse variant of categorical cross-entropy (as the MLP above does);
    ## plain "categorical_crossentropy" expects one-hot encoded labels.
    train_network("cnn", model_cnn, "sparse_categorical_crossentropy",
                  X_train_1,
                  y_train,
                  X_test_1,
                  y_test,
                  optimizer='adam')

    ## make predictions
    #predictions = model.predict(test_images)
    #predictions = np.argmax(predictions)
| ajrichards/bayesian-examples | deep-learning/cnn-fashion-mnist.py | Python | bsd-3-clause | 5,587 |
#!/usr/bin/env python
import pexpect
import unittest
import PexpectTestCase
import time
import os
class TestCtrlChars(PexpectTestCase.PexpectTestCase):
    """Tests for sending control characters to a spawned child process.

    Each test spawns ``getch.py``, which echoes back the ordinal value of
    every byte it receives, so expectations are on decimal byte values.
    """

    def test_control_chars (self):
        """FIXME: Python unicode was too hard to figure out, so
        this tests only the true ASCII characters. This is lame
        and should be fixed. I'm leaving this script here as a
        placeholder so that it will remind me to fix this one day.

        This is what it used to do:
        This tests that we can send all 256 8-bit ASCII characters
        to a child process."""
        # FIXME: Getting this to support Python's Unicode was
        # too hard, so I disabled this. I should fix this one day.
        # NOTE: the early return below intentionally skips the whole body.
        return 0
        child = pexpect.spawn('python getch.py')
        try:
            for i in range(256):
                # child.send(unicode('%d'%i, encoding='utf-8'))
                child.send(chr(i))
                child.expect ('%d\r\n' % i)
        except Exception, e:
            msg = "Did not echo character value: " + str(i) + "\n"
            msg = msg + str(e)
            self.fail(msg)

    def test_sendintr (self):
        # Ctrl-C (intr) should reach the child as byte value 3.
        try:
            child = pexpect.spawn('python getch.py')
            time.sleep(0.5)
            child.sendintr()
            child.expect ('3\r\n')
        except Exception, e:
            msg = "Did not echo character value: 3\n"
            msg = msg + str(e)
            self.fail(msg)

    def test_bad_sendcontrol_chars (self):
        """This tests that sendcontrol will return 0 for an unknown char. """
        child = pexpect.spawn('python getch.py')
        retval = child.sendcontrol('1')
        assert retval == 0, "sendcontrol() should have returned 0 because there is no such thing as ctrl-1."

    def test_sendcontrol(self):
        """This tests that we can send all special control codes by name.
        """
        child = pexpect.spawn('python getch.py')
        #child.delaybeforesend = 0.1
        # ctrl-a .. ctrl-z map to byte values 1..26.
        for i in 'abcdefghijklmnopqrstuvwxyz':
            child.sendcontrol(i)
            child.expect ('[0-9]+\r\n')
            #print child.after
        # The remaining named control characters, checked against their
        # expected byte values.
        child.sendcontrol('@')
        child.expect ('0\r\n')
        #print child.after
        child.sendcontrol('[')
        child.expect ('27\r\n')
        #print child.after
        child.sendcontrol('\\')
        child.expect ('28\r\n')
        #print child.after
        child.sendcontrol(']')
        child.expect ('29\r\n')
        #print child.after
        child.sendcontrol('^')
        child.expect ('30\r\n')
        #print child.after
        child.sendcontrol('_')
        child.expect ('31\r\n')
        #print child.after
        child.sendcontrol('?')
        child.expect ('127\r\n')
        #print child.after
if __name__ == '__main__':
    unittest.main()

# ``suite`` is consumed by external test runners that *import* this module
# rather than executing it directly; when run as a script, unittest.main()
# above never returns, so this line is only reached on import.
suite = unittest.makeSuite(TestCtrlChars,'test')
| serialx/pexpect-u | pexpect/tests/test_ctrl_chars.py | Python | mit | 2,864 |
# This file is part of Gem.
#
# Gem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gem is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gem. If not, see <http://www.gnu.org/licenses/\>.
from gem.api import Location
from enum import Enum
LOG_TAG = "player"
def player_position_update(player, location, warped):
    """Record the player's new location on their profile.

    ``warped`` distinguishes a teleport from ordinary movement; it is
    currently unused by this handler.
    """
    player.profile.location = location
| kaye64/gem | content/player.py | Python | gpl-3.0 | 838 |
import numpy as np
from mdtraj.testing import eq
from sklearn.base import clone
from sklearn.metrics import adjusted_rand_score
from msmbuilder.cluster import LandmarkAgglomerative
random = np.random.RandomState(2)
def test_1():
    # Two short random trajectories clustered into two states: fit+predict,
    # fit_predict, and "landmarks == all points" must agree exactly.
    # NOTE: draws from the shared module-level ``random`` stream, so the
    # execution order of these tests affects reproducibility.
    x = [random.randn(10, 2), random.randn(10, 2)]
    n_clusters = 2
    model1 = LandmarkAgglomerative(n_clusters=n_clusters)
    model2 = LandmarkAgglomerative(n_clusters=n_clusters,
                                   n_landmarks=sum(len(s) for s in x))

    labels0 = clone(model1).fit(x).predict(x)
    labels1 = model1.fit_predict(x)
    labels2 = model2.fit_predict(x)

    # One label array per input trajectory.
    assert len(labels0) == 2
    assert len(labels1) == 2
    assert len(labels2) == 2
    eq(labels0[0], labels1[0])
    eq(labels0[1], labels1[1])
    eq(labels0[0], labels2[0])
    eq(labels0[1], labels2[1])
    # Every requested cluster must actually be populated.
    assert len(np.unique(np.concatenate(labels0))) == n_clusters
def test_2():
    # this should be a really easy clustering problem
    # (the clusters are separated by ~10 sigma), so exact landmarks and
    # random landmarks must find the same partition up to relabeling.
    x = [random.randn(20, 2) + 10, random.randn(20, 2)]
    n_clusters = 2
    model1 = LandmarkAgglomerative(n_clusters=n_clusters)
    model2 = LandmarkAgglomerative(n_clusters=n_clusters,
                                   landmark_strategy='random',
                                   random_state=random, n_landmarks=20)

    labels1 = model1.fit_predict(x)
    labels2 = model2.fit_predict(x)
    # Adjusted Rand score of 1.0 means identical partitions.
    assert adjusted_rand_score(np.concatenate(labels1),
                               np.concatenate(labels2)) == 1.0
def test_callable_metric():
    """A user-supplied distance callable must reproduce the builtin metric."""
    def euclidean_rows(target, ref, i):
        return np.sqrt(np.sum((target - ref[i]) ** 2, axis=1))

    data = np.random.RandomState(0).randn(100, 2)
    builtin = LandmarkAgglomerative(n_clusters=10, n_landmarks=20,
                                    metric='euclidean')
    custom = LandmarkAgglomerative(n_clusters=10, n_landmarks=20,
                                   metric=euclidean_rows)
    eq(builtin.fit_predict([data])[0], custom.fit_predict([data])[0])
| stephenliu1989/msmbuilder | msmbuilder/tests/test_agglomerative.py | Python | lgpl-2.1 | 1,925 |
import bge
import math
pos = 1
def main():
    """Advance the animation one tick: rotate each named scene object.

    The third element of each entry selects local (True) vs. world (False)
    coordinates for applyRotation.
    """
    global pos
    pos += 1
    scene = bge.logic.getCurrentScene()
    spins = (
        ("Spinal_Cord", [0.0, 0.1, 0.0], False),
        ("Skull.Left", [0.0, -0.01, 0.0], False),
        ("Servo_Jaw_Drive_shaft", [0.0, -0.01, 0.0], False),
        ("MRL_logo", [0.01, 0.01, 0.0], False),
        ("Ear_speaker_organic", [0.01, 0.01, 0.0], False),
        ("Skull.Right", [0.01, 0.01, 0.0], True),
    )
    for name, rotation, local in spins:
        scene.objects[name].applyRotation(rotation, local)
main()
| sstocker46/pyrobotlab | home/GroG/crazy.py | Python | apache-2.0 | 1,320 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds English and Spanish translation columns for Event titles and
    # Person names (blank-allowed CharFields with empty-string defaults).

    dependencies = [
        ('history', '0003_auto_20150914_1609'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='title_en',
            field=models.CharField(default=b'', max_length=500, blank=True),
        ),
        migrations.AddField(
            model_name='event',
            name='title_es',
            field=models.CharField(default=b'', max_length=500, blank=True),
        ),
        migrations.AddField(
            model_name='person',
            name='name_en',
            field=models.CharField(default=b'', max_length=255, blank=True),
        ),
        migrations.AddField(
            model_name='person',
            name='name_es',
            field=models.CharField(default=b'', max_length=255, blank=True),
        ),
    ]
| ctrl-alt-delete/portafolio | history/migrations/0004_auto_20150914_1618.py | Python | mit | 961 |
from .common import Benchmark
import numpy as np
# Every ufunc exposed by the umath module.
UNARY_UFUNCS = [obj for obj in np.core.umath.__dict__.values() if
                isinstance(obj, np.ufunc)]
# Restricted to ufuncs with an object->object loop; ``invert`` is removed
# explicitly (it advertises "O->O" but is excluded from this benchmark set).
UNARY_OBJECT_UFUNCS = [uf for uf in UNARY_UFUNCS if "O->O" in uf.types]
UNARY_OBJECT_UFUNCS.remove(getattr(np, 'invert'))

# Stride and dtype grids shared by the strided-ufunc benchmarks below.
stride = [1, 2, 4]
stride_out = [1, 2, 4]
dtype = ['f', 'd']
class Unary(Benchmark):
    """Benchmark object-loop unary ufuncs over strided inputs and outputs."""
    params = [UNARY_OBJECT_UFUNCS, stride, stride_out, dtype]
    param_names = ['ufunc', 'stride_in', 'stride_out', 'dtype']
    timeout = 10

    def setup(self, ufuncname, stride, stride_out, dtype):
        np.seterr(all='ignore')
        # ``ufuncname`` is already the ufunc object itself (see ``params``),
        # so no lookup can fail here; the old try/except AttributeError
        # around this assignment was dead code.
        self.f = ufuncname
        N = 100000
        self.arr_out = np.empty(stride_out*N, dtype)
        self.arr = np.random.rand(stride*N).astype(dtype)
        if (ufuncname.__name__ == 'arccosh'):
            # arccosh is only defined for x >= 1; shift the samples into range.
            self.arr = 1.0 + self.arr

    def time_ufunc(self, ufuncname, stride, stride_out, dtype):
        self.f(self.arr[::stride], self.arr_out[::stride_out])
class AVX_UFunc_log(Benchmark):
    """Benchmark np.log over a strided float array."""
    params = [stride, dtype]
    param_names = ['stride', 'dtype']
    timeout = 10

    def setup(self, stride, dtype):
        np.seterr(all='ignore')
        count = 10000
        self.arr = np.random.random_sample(stride * count).astype(dtype)

    def time_log(self, stride, dtype):
        np.log(self.arr[::stride])
# Binary ufuncs and float dtypes exercised by the Binary benchmark.
binary_ufuncs = [
    'maximum', 'minimum', 'fmax', 'fmin'
]
binary_dtype = ['f', 'd']
class Binary(Benchmark):
    """Benchmark binary ufuncs with independent input and output strides."""
    param_names = ['ufunc', 'stride_in0', 'stride_in1', 'stride_out', 'dtype']
    params = [binary_ufuncs, stride, stride, stride_out, binary_dtype]
    timeout = 10

    def setup(self, ufuncname, stride_in0, stride_in1, stride_out, dtype):
        np.seterr(all='ignore')
        try:
            self.f = getattr(np, ufuncname)
        except AttributeError:
            raise NotImplementedError(f"No ufunc {ufuncname} found") from None
        count = 100000
        self.arr1 = np.random.rand(stride_in0 * count).astype(dtype)
        self.arr2 = np.random.rand(stride_in1 * count).astype(dtype)
        self.arr_out = np.empty(stride_out * count, dtype)

    def time_ufunc(self, ufuncname, stride_in0, stride_in1, stride_out, dtype):
        self.f(self.arr1[::stride_in0],
               self.arr2[::stride_in1],
               self.arr_out[::stride_out])
# Integer ufunc/dtype grid exercised by BinaryInt.
binary_int_ufuncs = ['maximum', 'minimum']
binary_int_dtype = ['b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'Q']
class BinaryInt(Binary):
    # Same benchmark machinery as Binary, but over integer ufuncs/dtypes.
    param_names = ['ufunc', 'stride_in0', 'stride_in1', 'stride_out', 'dtype']
    params = [binary_int_ufuncs, stride, stride, stride_out, binary_int_dtype]
class AVX_ldexp(Benchmark):
    """Benchmark np.ldexp(float, int) over strided arrays."""
    params = [dtype, stride]
    param_names = ['dtype', 'stride']
    timeout = 10

    def setup(self, dtype, stride):
        np.seterr(all='ignore')
        self.f = np.ldexp
        count = 10000
        self.arr1 = np.random.rand(stride * count).astype(dtype)
        self.arr2 = np.random.rand(stride * count).astype('i')

    def time_ufunc(self, dtype, stride):
        self.f(self.arr1[::stride], self.arr2[::stride])
# Complex binary ops plus stride/dtype grids for the AVX_cmplx benchmarks.
cmplx_bfuncs = ['add',
                'subtract',
                'multiply',
                'divide']
cmplxstride = [1, 2, 4]
cmplxdtype = ['F', 'D']
class AVX_cmplx_arithmetic(Benchmark):
    """Benchmark complex binary arithmetic over strided arrays."""
    params = [cmplx_bfuncs, cmplxstride, cmplxdtype]
    param_names = ['bfunc', 'stride', 'dtype']
    timeout = 10

    def setup(self, bfuncname, stride, dtype):
        np.seterr(all='ignore')
        try:
            self.f = getattr(np, bfuncname)
        except AttributeError:
            raise NotImplementedError(f"No bfunc {bfuncname} found") from None
        count = 10000
        self.arr1 = np.ones(stride * count, dtype)
        self.arr2 = np.ones(stride * count, dtype)

    def time_ufunc(self, bfuncname, stride, dtype):
        self.f(self.arr1[::stride], self.arr2[::stride])
# Unary complex ufuncs exercised by AVX_cmplx_funcs.
cmplx_ufuncs = ['reciprocal',
                'absolute',
                'square',
                'conjugate']
class AVX_cmplx_funcs(Benchmark):
    """Benchmark unary complex ufuncs over a strided array."""
    params = [cmplx_ufuncs, cmplxstride, cmplxdtype]
    param_names = ['bfunc', 'stride', 'dtype']
    timeout = 10

    def setup(self, bfuncname, stride, dtype):
        np.seterr(all='ignore')
        try:
            self.f = getattr(np, bfuncname)
        except AttributeError:
            raise NotImplementedError(f"No bfunc {bfuncname} found") from None
        count = 10000
        self.arr1 = np.ones(stride * count, dtype)

    def time_ufunc(self, bfuncname, stride, dtype):
        self.f(self.arr1[::stride])
class Mandelbrot(Benchmark):
    """Benchmark a vectorized Mandelbrot iteration (abs, multiply, sum)."""

    def f(self, z):
        # Mask of points that have not (yet) escaped the radius-4 disc.
        return np.abs(z) < 4.0

    def g(self, z, c):
        return np.sum(np.multiply(z, z) + c)

    def mandelbrot_numpy(self, c, maxiter):
        # BUG FIX: ``np.int`` was deprecated in NumPy 1.20 and removed in
        # 1.24; plain ``int`` yields the same default integer dtype.
        output = np.zeros(c.shape, int)
        # BUG FIX: ``np.empty`` left z uninitialized, so the first call to
        # f(z) compared garbage values and made the benchmark
        # non-deterministic; start every point at the origin instead.
        z = np.zeros(c.shape, np.complex64)
        for it in range(maxiter):
            notdone = self.f(z)
            output[notdone] = it
            z[notdone] = self.g(z[notdone], c[notdone])
        output[output == maxiter-1] = 0
        return output

    def mandelbrot_set(self, xmin, xmax, ymin, ymax, width, height, maxiter):
        r1 = np.linspace(xmin, xmax, width, dtype=np.float32)
        r2 = np.linspace(ymin, ymax, height, dtype=np.float32)
        c = r1 + r2[:, None]*1j
        n3 = self.mandelbrot_numpy(c, maxiter)
        return (r1, r2, n3.T)

    def time_mandel(self):
        self.mandelbrot_set(-0.74877, -0.74872, 0.06505, 0.06510, 1000, 1000, 2048)
class LogisticRegression(Benchmark):
    """Benchmark batch-gradient-descent logistic regression in pure NumPy."""
    param_names = ['dtype']
    params = [np.float32, np.float64]
    timeout = 1000

    def train(self, max_epoch):
        for epoch in range(max_epoch):
            z = np.matmul(self.X_train, self.W)
            A = 1 / (1 + np.exp(-z))  # sigmoid(z)
            # Binary cross-entropy; computed (as in the original) even
            # though the value is not consumed.
            loss = -np.mean(self.Y_train * np.log(A) + (1-self.Y_train) * np.log(1-A))
            dz = A - self.Y_train
            dw = (1/self.size) * np.matmul(self.X_train.T, dz)
            self.W = self.W - self.alpha*dw

    def setup(self, dtype):
        np.random.seed(42)
        self.size = 250
        features = 16
        self.X_train = np.random.rand(self.size, features).astype(dtype)
        # BUG FIX: keep the labels as a (size, 1) column vector; as a flat
        # (size,) vector, ``A - self.Y_train`` broadcast to (size, size)
        # and the weight matrix silently grew to (features, size).
        self.Y_train = np.random.choice(2, self.size).astype(dtype).reshape(-1, 1)
        # Initialize weights
        self.W = np.zeros((features, 1), dtype=dtype)
        self.b = np.zeros((1, 1), dtype=dtype)
        self.alpha = 0.1

    def time_train(self, dtype):
        self.train(1000)
| numpy/numpy | benchmarks/benchmarks/bench_ufunc_strides.py | Python | bsd-3-clause | 6,500 |
"""
Content negotiation deals with selecting an appropriate renderer given the
incoming request. Typically this will be based on the request's Accept header.
"""
from __future__ import unicode_literals
from django.http import Http404
from rest_framework import HTTP_HEADER_ENCODING, exceptions
from rest_framework.settings import api_settings
from rest_framework.utils.mediatypes import (
_MediaType, media_type_matches, order_by_precedence
)
class BaseContentNegotiation(object):
    # Strategy interface: subclasses decide how a parser is selected for the
    # request body and how a renderer is selected for the response.
    def select_parser(self, request, parsers):
        raise NotImplementedError('.select_parser() must be implemented')

    def select_renderer(self, request, renderers, format_suffix=None):
        raise NotImplementedError('.select_renderer() must be implemented')
class DefaultContentNegotiation(BaseContentNegotiation):
    settings = api_settings

    def select_parser(self, request, parsers):
        """
        Given a list of parsers and a media type, return the appropriate
        parser to handle the incoming request.
        """
        for parser in parsers:
            if media_type_matches(parser.media_type, request.content_type):
                return parser
        # No parser accepts the request's Content-Type.
        return None

    def select_renderer(self, request, renderers, format_suffix=None):
        """
        Given a request and a list of renderers, return a two-tuple of:
        (renderer, media type).
        """
        # Allow URL style format override.  eg. "?format=json
        format_query_param = self.settings.URL_FORMAT_OVERRIDE
        format = format_suffix or request.query_params.get(format_query_param)

        if format:
            renderers = self.filter_renderers(renderers, format)

        accepts = self.get_accept_list(request)

        # Check the acceptable media types against each renderer,
        # attempting more specific media types first
        # NB. The inner loop here isn't as bad as it first looks :)
        # Worst case is we're looping over len(accept_list) * len(self.renderers)
        for media_type_set in order_by_precedence(accepts):
            for renderer in renderers:
                for media_type in media_type_set:
                    if media_type_matches(renderer.media_type, media_type):
                        # Return the most specific media type as accepted.
                        media_type_wrapper = _MediaType(media_type)
                        if (
                            _MediaType(renderer.media_type).precedence >
                            media_type_wrapper.precedence
                        ):
                            # Eg client requests '*/*'
                            # Accepted media type is 'application/json'
                            # The renderer is more specific than the request:
                            # advertise the renderer's own media type, carrying
                            # over any parameters from the requested type.
                            full_media_type = ';'.join(
                                (renderer.media_type,) +
                                tuple('{0}={1}'.format(
                                    key, value.decode(HTTP_HEADER_ENCODING))
                                    for key, value in media_type_wrapper.params.items()))
                            return renderer, full_media_type
                        else:
                            # Eg client requests 'application/json; indent=8'
                            # Accepted media type is 'application/json; indent=8'
                            return renderer, media_type

        raise exceptions.NotAcceptable(available_renderers=renderers)

    def filter_renderers(self, renderers, format):
        """
        If there is a '.json' style format suffix, filter the renderers
        so that we only negotiation against those that accept that format.
        """
        renderers = [renderer for renderer in renderers
                     if renderer.format == format]
        if not renderers:
            raise Http404
        return renderers

    def get_accept_list(self, request):
        """
        Given the incoming request, return a tokenized list of media
        type strings.
        """
        # Absent header means the client accepts anything.
        header = request.META.get('HTTP_ACCEPT', '*/*')
        return [token.strip() for token in header.split(',')]
| OpenWinCon/OpenWinNet | web-gui/myvenv/lib/python3.4/site-packages/rest_framework/negotiation.py | Python | apache-2.0 | 4,084 |
# Graphite host defaults to a fixed EC2 instance; the first command-line
# argument, when present, overrides it.
from sys import argv

import urllib2  # Python 2 only: this script predates urllib.request

graphite_server = "ec2-54-242-170-176.compute-1.amazonaws.com"
# Bug fix: the override lives in argv[1], which exists whenever
# len(argv) > 1.  The previous check (len(argv) > 2) silently ignored the
# override unless a spurious second argument was also supplied.
if len(argv) > 1:
    graphite_server = argv[1]
def fetchone(query):
    """Fetch the latest non-None datapoint for *query* from Graphite.

    Returns the value as a float, or -1 when the series has no usable
    final datapoint.
    """
    url = "http://%s/render?format=raw&target=%s" % (graphite_server, query)
    response = urllib2.urlopen(url)
    # Drop 'None' placeholders from the raw series, then take the last point.
    payload = response.read().replace("None,", "").replace(",None", "")
    points = payload.split(',')
    last = points[-1].strip('\n')
    if not last or last == 'None':
        return -1
    return float(last)
# Graphite render targets of interest, keyed by a short human-readable label.
# Each target sums the named metric across all reporting hosts ('*' prefix).
metrics = {
    "leveldb-get-count" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.get-requests.count)",
    "leveldb-put-count" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.put-requests.count)",
    "leveldb-total-mean-put-latency" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.leveldb-put-latencies.mean)",
    "leveldb-total-mean-get-latency" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.leveldb-get-latencies.mean)",
    "leveldb-total-99.9th-put-latency" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.leveldb-put-latencies.999percentile)",
    "leveldb-total-99.9th-get-latency" :
        "sumSeries(*.edu.berkeley.thebes.common.persistence.disk.LevelDBPersistenceEngine.leveldb-get-latencies.999percentile)",
    "antientropy-ack-txn-pending-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.antientropy.AntiEntropyServiceHandler.ack-transaction-pending-requests.count)",
    "antientropy-put-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.antientropy.AntiEntropyServiceHandler.put-requests.count)",
    "antientropy-router-write-announce-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.antientropy.clustering.AntiEntropyServiceRouter.write-announce-events.count)",
    "antientropy-router-write-forward-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.antientropy.clustering.AntiEntropyServiceRouter.write-forward-events.count)",
    "dep-resolver-dgood-txn-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.dependencies.DependencyResolver.dgood-transaction-total.count)",
    "dep-resolver-pending-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.dependencies.DependencyResolver.num-pending-versions.value)",
    "replica-service-get-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.replica.ReplicaServiceHandler.get-requests.count)",
    "replica-service-put-count" :
        "sumSeries(*.edu.berkeley.thebes.hat.server.replica.ReplicaServiceHandler.put-requests.count)"
}
tocheck = metrics.keys()
tocheck.sort()
for key in tocheck:
print key, fetchone(metrics[key])
| pbailis/hat-vldb2014-code | prototype-database-code/scripts/fetch_stats_from_graphite.py | Python | apache-2.0 | 2,804 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.build_graph.target import Target
from pants.task.console_task import ConsoleTask
class MinimalCover(ConsoleTask):
    """Print a minimal covering set of targets.

    For a given set of input targets, the output targets transitive dependency
    set will include all the input targets without gaps.
    """

    def console_output(self, _):
        """Yield the address spec of every root not already covered by a
        sibling root's transitive dependencies."""
        covered_by_deps = self._collect_internal_deps(self.context.target_roots)
        emitted = set()
        for root in self.context.target_roots:
            if root in covered_by_deps or root in emitted:
                continue
            emitted.add(root)
            yield root.address.spec

    def _collect_internal_deps(self, targets):
        """Collect one level of dependencies from the given targets, then walk
        transitively.

        Unlike calling `Target.closure_for_targets` directly, the result
        excludes the roots themselves unless some root depends on another.
        """
        first_level = set()
        for tgt in targets:
            first_level.update(tgt.dependencies)
        return Target.closure_for_targets(first_level)
| landism/pants | src/python/pants/backend/graph_info/tasks/minimal_cover.py | Python | apache-2.0 | 1,380 |
import mock
import pytest
from addons.wiki.models import WikiVersion
from django.core.exceptions import ValidationError
from django.utils import timezone
from framework.auth.core import Auth
from framework.exceptions import PermissionsError
from nose.tools import assert_raises
from osf.models import Node, Registration, Sanction, RegistrationSchema, NodeLog
from addons.wiki.models import WikiPage
from osf.utils.permissions import ADMIN
from website import settings
from . import factories
from .utils import assert_datetime_equal, mock_archive
from osf_tests.factories import get_default_metaschema, DraftRegistrationFactory
from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory
from api.providers.workflows import Workflows
from osf.migrations import update_provider_auth_groups
from osf.models.action import RegistrationAction
from osf_tests.management_commands.test_migration_registration_responses import (
prereg_registration_responses,
prereg_registration_metadata_built,
veer_registration_responses,
veer_condensed
)
from osf.utils.workflows import (
RegistrationModerationStates,
RegistrationModerationTriggers,
ApprovalStates
)
# Every test in this module requires database access.
pytestmark = pytest.mark.django_db
@pytest.fixture()
def user():
    """A freshly created OSF user."""
    new_user = factories.UserFactory()
    return new_user
@pytest.fixture()
def project(user, auth, fake):
    """A project created by ``user`` carrying one random tag."""
    new_project = factories.ProjectFactory(creator=user)
    new_project.add_tag(fake.word(), auth=auth)
    return new_project
@pytest.fixture()
def auth(user):
    """An ``Auth`` wrapper around the ``user`` fixture."""
    auth_obj = Auth(user)
    return auth_obj
# copied from tests/test_models.py
def test_factory(user, project):
    """RegistrationFactory honors direct kwargs and project-based creation."""
    # Registration built directly from kwargs.
    reg_from_kwargs = factories.RegistrationFactory(
        title='t1', description='d1', creator=user,
    )
    assert reg_from_kwargs.title == 't1'
    assert reg_from_kwargs.description == 'd1'
    assert reg_from_kwargs.contributors.count() == 1
    assert user in reg_from_kwargs.contributors.all()
    assert reg_from_kwargs.registered_user == user
    assert reg_from_kwargs.private_links.count() == 0

    # Registration derived from an existing project through a draft.
    second_user = factories.UserFactory()
    project.add_contributor(second_user)
    metadata = {'some': 'data'}
    draft = DraftRegistrationFactory(registration_metadata=metadata, branched_from=project)
    reg_from_project = factories.RegistrationFactory(
        project=project,
        user=second_user,
        draft_registration=draft,
    )
    assert reg_from_project.registered_from == project
    assert reg_from_project.registered_user == second_user
class TestRegistration:
    """Basic Registration model attribute and update behavior."""

    def test_registered_schema_id(self):
        reg = factories.RegistrationFactory()
        assert reg.registered_schema_id == reg.registered_schema.get()._id

    # Regression test for https://openscience.atlassian.net/browse/PLAT-776
    # Some very old registrations on prod don't have a schema
    def test_registered_schema_id_with_no_schema(self):
        reg = factories.RegistrationFactory()
        reg.registered_schema.clear()
        assert reg.registered_schema_id is None

    def test_update_category(self, auth):
        # Changing the category must also record a CATEGORY_UPDATED log
        # with both the new and original values in its params.
        reg = factories.RegistrationFactory(category='instrumentation')
        new_category = 'software'
        reg.update({'category': new_category}, auth=auth)
        assert reg.category == new_category
        last_log = reg.logs.latest()
        assert last_log.action == NodeLog.CATEGORY_UPDATED
        assert last_log.params['category_new'] == new_category
        assert last_log.params['category_original'] == 'instrumentation'

    def test_update_article_doi(self, auth):
        # Changing the article DOI must record an ARTICLE_DOI_UPDATED log
        # with both the new and original values in its params.
        reg = factories.RegistrationFactory()
        reg.article_doi = '10.1234/giraffe'
        reg.save()
        new_article_doi = '10.12345/elephant'
        reg.update({'article_doi': new_article_doi}, auth=auth)
        assert reg.article_doi == new_article_doi
        last_log = reg.logs.latest()
        assert last_log.action == NodeLog.ARTICLE_DOI_UPDATED
        assert last_log.params['article_doi_new'] == new_article_doi
        assert last_log.params['article_doi_original'] == '10.1234/giraffe'
# copied from tests/test_models.py
class TestRegisterNode:
    """Registering a project copies its attributes, nodes, wikis, and
    contributors onto the new registration."""

    @pytest.fixture()
    def registration(self, project):
        # A registration of ``project`` with one private link attached.
        reg = factories.RegistrationFactory(project=project)
        private_link = factories.PrivateLinkFactory()
        private_link.nodes.add(reg)
        private_link.save()
        return reg

    def test_does_not_have_addon_added_log(self, registration):
        # should not have addon_added log from wiki addon being added
        assert NodeLog.ADDON_ADDED not in list(registration.logs.values_list('action', flat=True))

    def test_title(self, registration, project):
        assert registration.title == project.title

    def test_description(self, registration, project):
        assert registration.description == project.description

    def test_category(self, registration, project):
        assert registration.category == project.category

    def test_permissions(self, registration, project):
        # Registrations start private even when the source project is public.
        assert registration.is_public is False
        project.set_privacy(Node.PUBLIC)
        registration = factories.RegistrationFactory(project=project)
        assert registration.is_public is False

    def test_contributors(self, registration, project):
        assert registration.contributors.count() == project.contributors.count()
        assert (
            set(registration.contributors.values_list('id', flat=True)) ==
            set(project.contributors.values_list('id', flat=True))
        )

    def test_forked_from(self, registration, project, auth):
        # A node that is not a fork
        assert registration.forked_from is None
        # A node that is a fork
        fork = project.fork_node(auth)
        registration = factories.RegistrationFactory(project=fork)
        assert registration.forked_from == project

    def test_private_links(self, registration, project):
        # The registration's private links are distinct from the project's.
        assert registration.private_links != project.private_links

    def test_creator(self, registration, project, user):
        # The creator is the project's original creator, not any contributor.
        user2 = factories.UserFactory()
        project.add_contributor(user2)
        registration = factories.RegistrationFactory(project=project)
        assert registration.creator == user

    def test_logs(self, registration, project):
        # Registered node has all logs except for registration approval initiated
        assert project.logs.count() - 1 == registration.logs.count()
        assert project.logs.first().action == 'registration_initiated'
        project_second_log = project.logs.all()[:2][1]
        assert registration.logs.first().action == project_second_log.action

    def test_tags(self, registration, project):
        assert (
            set(registration.tags.values_list('name', flat=True)) ==
            set(project.tags.values_list('name', flat=True))
        )

    def test_nodes(self, project, user):
        # Create some nodes
        # component of project
        factories.NodeFactory(
            creator=user,
            parent=project,
            title='Title1',
        )
        subproject = factories.ProjectFactory(
            creator=user,
            parent=project,
            title='Title2',
        )
        # component of subproject
        factories.NodeFactory(
            creator=user,
            parent=subproject,
            title='Title3',
        )
        # Make a registration
        registration = factories.RegistrationFactory(project=project)
        # Reload the registration; else test won't catch failures to save
        registration.refresh_from_db()
        # Registration has the nodes
        assert registration._nodes.count() == 2
        assert(
            set(registration._nodes.values_list('title', flat=True)) ==
            set(project._nodes.values_list('title', flat=True))
        )
        # Nodes are copies and not the original versions
        for node in registration._nodes.all():
            assert node not in project._nodes.all()
            assert node.is_registration

    def test_linked_nodes(self, project, user, auth):
        linked_node = factories.ProjectFactory()
        project.add_node_link(linked_node, auth=auth, save=True)
        registration = factories.RegistrationFactory(project=project)
        registration.refresh_from_db()
        assert project.linked_nodes.count() == registration.linked_nodes.count()
        assert project.linked_nodes.first().title == registration.linked_nodes.first().title

    def test_private_contributor_registration(self, project, user):
        # Create some nodes
        # component
        comp1 = factories.NodeFactory(  # noqa
            title='Comp1',
            creator=user,
            parent=project,
        )
        # subproject
        # NOTE(review): this subproject reuses the title 'Comp1' -- presumably
        # a copy-paste slip ('Comp2' intended); titles are not asserted below,
        # so behavior is unaffected.  Confirm before renaming.
        comp2 = factories.ProjectFactory(  # noqa
            title='Comp1',
            creator=user,
            parent=project,
        )
        # Create some nodes to share
        shared_component = factories.NodeFactory(
            title='Shared Component',
            creator=user,
            parent=project,
        )
        shared_subproject = factories.ProjectFactory(
            title='Shared Subproject',
            creator=user,
            parent=project,
        )
        # Share the project and some nodes
        user2 = factories.UserFactory()
        project.add_contributor(user2, permissions=ADMIN)
        shared_component.add_contributor(user2, permissions=ADMIN)
        shared_subproject.add_contributor(user2, permissions=ADMIN)
        # Partial contributor registers the node
        registration = factories.RegistrationFactory(project=project, user=user2)
        # The correct subprojects were registered
        for registered_node in registration._nodes.all():
            assert registered_node.root == registration
            assert registered_node.registered_from
            assert registered_node.parent_node == registration
            assert registered_node.registered_from.parent_node == project

    def test_is_registration(self, registration):
        assert registration.is_registration

    def test_registered_date(self, registration):
        # allowance increased in OSF-9050, if this fails sporadically again then registrations may need to be optimized or this test reworked
        assert_datetime_equal(registration.registered_date, timezone.now(), allowance=10000)

    def test_registered_addons(self, registration):
        assert (
            [addon.config.short_name for addon in registration.get_addons()] ==
            [addon.config.short_name for addon in registration.registered_from.get_addons()]
        )

    def test_registered_user(self, project):
        # Add a second contributor
        user2 = factories.UserFactory()
        project.add_contributor(user2, permissions=ADMIN)
        # Second contributor registers project
        registration = factories.RegistrationFactory(project=project, user=user2)
        assert registration.registered_user == user2

    def test_registered_from(self, registration, project):
        assert registration.registered_from == project

    def test_registered_get_absolute_url(self, registration):
        assert (
            registration.get_absolute_url() ==
            '{}v2/registrations/{}/'.format(settings.API_DOMAIN, registration._id)
        )

    def test_registration_list(self, registration, project):
        assert registration._id in [n._id for n in project.registrations_all]

    def test_registration_gets_institution_affiliation(self, user):
        # Institution affiliations on the source node carry over.
        node = factories.NodeFactory()
        institution = factories.InstitutionFactory()
        user.affiliated_institutions.add(institution)
        user.save()
        node.add_affiliated_institution(institution, user=user)
        node.save()
        registration = factories.RegistrationFactory(project=node)
        assert (
            set(registration.affiliated_institutions.values_list('id', flat=True)) ==
            set(node.affiliated_institutions.values_list('id', flat=True))
        )

    def test_registration_of_project_with_no_wiki_pages(self, registration):
        assert WikiPage.objects.get_wiki_pages_latest(registration).exists() is False
        assert registration.wikis.all().exists() is False
        assert registration.wiki_private_uuids == {}

    @mock.patch('website.project.signals.after_create_registration')
    def test_registration_clones_project_wiki_pages(self, mock_signal, project, user):
        # Both wiki versions (1 and 2) must be cloned onto the registration
        # as new objects, not shared with the source project.
        project = factories.ProjectFactory(creator=user, is_public=True)
        wiki_page = WikiFactory(
            user=user,
            node=project,
        )
        wiki = WikiVersionFactory(
            wiki_page=wiki_page,
        )
        current_wiki = WikiVersionFactory(
            wiki_page=wiki_page,
            identifier=2
        )
        draft_reg = factories.DraftRegistrationFactory(branched_from=project)
        registration = project.register_node(get_default_metaschema(), Auth(user), draft_reg, None)
        assert registration.wiki_private_uuids == {}

        registration_wiki_current = WikiVersion.objects.get_for_node(registration, current_wiki.wiki_page.page_name)
        assert registration_wiki_current.wiki_page.node == registration
        assert registration_wiki_current._id != current_wiki._id
        assert registration_wiki_current.identifier == 2

        registration_wiki_version = WikiVersion.objects.get_for_node(registration, wiki.wiki_page.page_name, version=1)
        assert registration_wiki_version.wiki_page.node == registration
        assert registration_wiki_version._id != wiki._id
        assert registration_wiki_version.identifier == 1

    def test_legacy_private_registrations_can_be_made_public(self, registration, auth):
        registration.is_public = False
        registration.set_privacy(Node.PUBLIC, auth=auth)
        assert registration.is_public
class TestRegisterNodeContributors:
    """Unregistered (unclaimed) contributors must be copied onto the
    registration with updated unclaimed records."""

    @pytest.fixture()
    def project_two(self, user, auth):
        return factories.ProjectFactory(creator=user)

    @pytest.fixture()
    def component(self, user, auth, project_two):
        return factories.NodeFactory(
            creator=user,
            parent=project_two,
        )

    @pytest.fixture()
    def contributor_unregistered(self, user, auth, project_two):
        # Unregistered contributor with an email on the parent project.
        ret = project_two.add_unregistered_contributor(fullname='Johnny Git Gud', email='[email protected]', auth=auth)
        project_two.save()
        return ret

    @pytest.fixture()
    def contributor_unregistered_no_email(self, user, auth, project_two, component):
        # Unregistered contributor with no email on the component.
        ret = component.add_unregistered_contributor(fullname='Johnny B. Bard', email='', auth=auth)
        component.save()
        return ret

    @pytest.fixture()
    def registration(self, project_two, component, contributor_unregistered, contributor_unregistered_no_email):
        with mock_archive(project_two, autoapprove=True) as registration:
            return registration

    def test_unregistered_contributors_unclaimed_records_get_copied(self, user, project, component, registration, contributor_unregistered, contributor_unregistered_no_email):
        contributor_unregistered.refresh_from_db()
        contributor_unregistered_no_email.refresh_from_db()
        assert registration.contributors.filter(id=contributor_unregistered.id).exists()
        assert registration._id in contributor_unregistered.unclaimed_records

        # component
        component_registration = registration.nodes[0]
        assert component_registration.contributors.filter(id=contributor_unregistered_no_email.id).exists()
        assert component_registration._id in contributor_unregistered_no_email.unclaimed_records
# copied from tests/test_registrations
class TestNodeApprovalStates:
    """Sanction resolution (embargo / retraction / approval) on registrations,
    including lookup through parent registrations for child nodes."""

    def test_sanction_none(self):
        node = factories.NodeFactory()
        assert bool(node.sanction) is False

    def test_sanction_embargo_termination_first(self):
        # Embargo termination approval takes precedence as the active sanction.
        embargo_termination_approval = factories.EmbargoTerminationApprovalFactory()
        registration = Registration.objects.get(embargo_termination_approval=embargo_termination_approval)
        assert registration.sanction == embargo_termination_approval

    def test_sanction_retraction(self):
        retraction = factories.RetractionFactory()
        registration = Registration.objects.get(retraction=retraction)
        assert registration.sanction == retraction

    def test_sanction_embargo(self):
        embargo = factories.EmbargoFactory()
        registration = Registration.objects.get(embargo=embargo)
        assert registration.sanction == embargo

    def test_sanction_registration_approval(self):
        registration_approval = factories.RegistrationApprovalFactory()
        registration = Registration.objects.get(registration_approval=registration_approval)
        assert registration.sanction == registration_approval

    def test_sanction_searches_parents(self):
        # A grandchild registration resolves its sanction from the root.
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node) as registration:
            approval = registration.registration_approval
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.sanction == approval

    def test_is_pending_registration(self):
        registration_approval = factories.RegistrationApprovalFactory()
        registration = Registration.objects.get(registration_approval=registration_approval)
        assert registration_approval.is_pending_approval
        assert registration.is_pending_registration

    def test_is_pending_registration_searches_parents(self):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_pending_registration

    def test_is_registration_approved(self):
        registration_approval = factories.RegistrationApprovalFactory(state=Sanction.APPROVED, approve=True)
        registration = Registration.objects.get(registration_approval=registration_approval)
        assert registration.is_registration_approved

    def test_is_registration_approved_searches_parents(self):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node) as registration:
            registration.registration_approval.state = Sanction.APPROVED
            registration.registration_approval.save()
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_registration_approved is True

    def test_is_retracted(self):
        retraction = factories.RetractionFactory(state=Sanction.APPROVED, approve=True)
        registration = Registration.objects.get(retraction=retraction)
        assert registration.is_retracted

    @mock.patch('osf.models.node.AbstractNode.update_search')
    def test_is_retracted_searches_parents(self, mock_update_search):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node, autoapprove=True, retraction=True, autoapprove_retraction=True) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_retracted is True

    def test_is_pending_retraction(self):
        retraction = factories.RetractionFactory()
        registration = Registration.objects.get(retraction=retraction)
        assert retraction.is_pending_approval is True
        assert registration.is_pending_retraction is True

    @mock.patch('osf.models.node.AbstractNode.update_search')
    def test_is_pending_retraction_searches_parents(self, mock_update_search):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node, autoapprove=True, retraction=True) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_pending_retraction is True

    def test_embargo_end_date(self):
        embargo = factories.EmbargoFactory()
        registration = Registration.objects.get(embargo=embargo)
        assert registration.embargo_end_date == embargo.embargo_end_date

    def test_embargo_end_date_searches_parents(self):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node, embargo=True) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.embargo_end_date == registration.embargo_end_date

    def test_is_pending_embargo(self):
        embargo = factories.EmbargoFactory()
        registration = Registration.objects.get(embargo=embargo)
        assert embargo.is_pending_approval
        assert registration.is_pending_embargo

    def test_is_pending_embargo_searches_parents(self):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node, embargo=True) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_pending_embargo

    def test_is_embargoed(self):
        embargo = factories.EmbargoFactory()
        registration = Registration.objects.get(embargo=embargo)
        registration.embargo.state = Sanction.APPROVED
        registration.embargo.save()
        assert registration.is_embargoed

    def test_is_embargoed_searches_parents(self):
        user = factories.UserFactory()
        node = factories.ProjectFactory(creator=user)
        child = factories.NodeFactory(creator=user, parent=node)
        factories.NodeFactory(creator=user, parent=child)
        with mock_archive(node, embargo=True, autoapprove=True) as registration:
            sub_reg = registration._nodes.first()._nodes.first()
            assert sub_reg.is_embargoed
@pytest.mark.enable_implicit_clean
class TestDOIValidation:
    """article_doi must be a bare DOI: no URL scheme and no 'doi:' prefix."""

    def test_validate_bad_doi(self):
        reg = factories.RegistrationFactory()

        with pytest.raises(ValidationError):
            reg.article_doi = 'nope'
            reg.save()
        with pytest.raises(ValidationError):
            reg.article_doi = 'https://dx.doi.org/10.123.456'
            reg.save()  # should save the bare DOI, not a URL
        with pytest.raises(ValidationError):
            reg.article_doi = 'doi:10.10.1038/nwooo1170'
            reg.save()  # should save without doi: prefix

    def test_validate_good_doi(self):
        reg = factories.RegistrationFactory()
        doi = '10.11038/nwooo1170'
        reg.article_doi = doi
        reg.save()
        assert reg.article_doi == doi
class TestRegistrationMixin:
    """Expanding flat registration_responses into nested registration_metadata
    for two different schemas (Prereg Challenge and Veer)."""

    @pytest.fixture()
    def draft_prereg(self, prereg_schema):
        return factories.DraftRegistrationFactory(
            registration_schema=prereg_schema,
            registration_metadata={},
        )

    @pytest.fixture()
    def draft_veer(self, veer_schema):
        return factories.DraftRegistrationFactory(
            registration_schema=veer_schema,
            registration_metadata={},
        )

    @pytest.fixture()
    def prereg_schema(self):
        return RegistrationSchema.objects.get(
            name='Prereg Challenge',
            schema_version=2
        )

    @pytest.fixture()
    def veer_schema(self):
        return RegistrationSchema.objects.get(
            name__icontains='Pre-Registration in Social Psychology',
            schema_version=2
        )

    def test_expand_registration_responses(self, draft_prereg):
        # Expansion must not mutate the stored (empty) registration_metadata.
        draft_prereg.registration_responses = prereg_registration_responses
        draft_prereg.save()
        assert draft_prereg.registration_metadata == {}

        registration_metadata = draft_prereg.expand_registration_responses()

        assert registration_metadata == prereg_registration_metadata_built

    def test_expand_registration_responses_veer(self, draft_veer):
        draft_veer.registration_responses = veer_registration_responses
        draft_veer.save()
        assert draft_veer.registration_metadata == {}

        registration_metadata = draft_veer.expand_registration_responses()

        assert registration_metadata == veer_condensed
class TestRegistationModerationStates():
@pytest.fixture
def embargo(self):
return factories.EmbargoFactory()
@pytest.fixture
def registration_approval(self):
return factories.RegistrationApprovalFactory()
@pytest.fixture
def retraction(self):
return factories.RetractionFactory()
@pytest.fixture
def embargo_termination(self):
return factories.EmbargoTerminationApprovalFactory()
@pytest.fixture
def moderator(self):
return factories.AuthUserFactory()
@pytest.fixture
def provider(self, moderator):
provider = factories.RegistrationProviderFactory()
update_provider_auth_groups()
provider.get_group('moderator').user_set.add(moderator)
provider.reviews_workflow = Workflows.PRE_MODERATION.value
provider.save()
return provider
@pytest.fixture
def moderated_registration(self, provider):
return factories.RegistrationFactory(provider=provider, is_public=True)
@pytest.fixture
def withdraw_action(self, moderated_registration):
action = RegistrationAction.objects.create(
creator=moderated_registration.creator,
target=moderated_registration,
trigger=RegistrationModerationTriggers.REQUEST_WITHDRAWAL.db_name,
from_state=RegistrationModerationStates.ACCEPTED.db_name,
to_state=RegistrationModerationStates.PENDING_WITHDRAW.db_name,
comment='yo'
)
action.save()
return action
@pytest.fixture
def withdraw_action_for_retraction(self, retraction):
action = RegistrationAction.objects.create(
creator=retraction.target_registration.creator,
target=retraction.target_registration,
trigger=RegistrationModerationTriggers.REQUEST_WITHDRAWAL.db_name,
from_state=RegistrationModerationStates.ACCEPTED.db_name,
to_state=RegistrationModerationStates.PENDING_WITHDRAW.db_name,
comment='yo'
)
action.save()
return action
def test_embargo_states(self, embargo):
registration = embargo.target_registration
embargo.to_UNAPPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.INITIAL.db_name
embargo.to_PENDING_MODERATION()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING.db_name
embargo.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.EMBARGO.db_name
embargo.to_COMPLETED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
embargo.to_MODERATOR_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.REJECTED.db_name
embargo.to_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.REVERTED.db_name
def test_registration_approval_states(self, registration_approval):
registration = registration_approval.target_registration
registration_approval.to_UNAPPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.INITIAL.db_name
registration_approval.to_PENDING_MODERATION()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING.db_name
registration_approval.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
registration_approval.to_MODERATOR_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.REJECTED.db_name
registration_approval.to_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.REVERTED.db_name
def test_retraction_states_over_registration_approval(self, registration_approval, withdraw_action):
registration = registration_approval.target_registration
registration.is_public = True
retraction = registration.retract_registration(registration.creator, justification='test')
registration_approval.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW_REQUEST.db_name
retraction.to_PENDING_MODERATION()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW.db_name
retraction.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.WITHDRAWN.db_name
retraction.to_MODERATOR_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
retraction.to_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
def test_retraction_states_over_embargo(self, embargo):
registration = embargo.target_registration
retraction = registration.retract_registration(user=registration.creator, justification='test')
embargo.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW_REQUEST.db_name
retraction.to_PENDING_MODERATION()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW.db_name
retraction.to_APPROVED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.WITHDRAWN.db_name
retraction.to_MODERATOR_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.EMBARGO.db_name
retraction.to_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.EMBARGO.db_name
embargo.to_COMPLETED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
retraction.to_MODERATOR_REJECTED()
registration.refresh_from_db()
assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
def test_embargo_termination_states(self, embargo_termination):
    """Walk the embargo-termination approval machine and check the
    registration's derived moderation state after each transition."""
    registration = embargo_termination.target_registration
    assert registration.moderation_state == RegistrationModerationStates.PENDING_EMBARGO_TERMINATION.db_name
    transitions = [
        (embargo_termination.to_REJECTED, RegistrationModerationStates.EMBARGO),
        (embargo_termination.to_APPROVED, RegistrationModerationStates.ACCEPTED),
    ]
    for fire_transition, expected_state in transitions:
        fire_transition()
        registration.update_moderation_state()
        assert registration.moderation_state == expected_state.db_name
def test_retraction_states_over_embargo_termination(self, embargo_termination):
    # After an accepted embargo termination, the retraction lifecycle behaves
    # exactly as it does for an ordinary accepted registration: rejection of
    # the retraction returns the registration to ACCEPTED.
    registration = embargo_termination.target_registration
    embargo_termination.accept()
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
    retraction = registration.retract_registration(user=registration.creator, justification='because')
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW_REQUEST.db_name
    retraction.to_PENDING_MODERATION()
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.PENDING_WITHDRAW.db_name
    retraction.to_APPROVED()
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.WITHDRAWN.db_name
    retraction.to_MODERATOR_REJECTED()
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
    retraction.to_REJECTED()
    registration.refresh_from_db()
    assert registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
class TestForcedWithdrawal():
    """Moderator-initiated ("forced") withdrawal of registrations:
    a moderator can skip the normal retraction approval flow, but only
    on moderated registrations and only if they hold the moderator role."""

    @pytest.fixture
    def embargo_termination(self):
        return factories.EmbargoTerminationApprovalFactory()

    @pytest.fixture
    def moderator(self):
        return factories.AuthUserFactory()

    @pytest.fixture
    def provider(self, moderator):
        # Pre-moderated provider with `moderator` added to its moderator group.
        provider = factories.RegistrationProviderFactory()
        update_provider_auth_groups()
        provider.get_group('moderator').user_set.add(moderator)
        provider.reviews_workflow = Workflows.PRE_MODERATION.value
        provider.save()
        return provider

    @pytest.fixture
    def moderated_registration(self, provider):
        registration = factories.RegistrationFactory(provider=provider, is_public=True)
        # Move to implicit ACCEPTED state
        registration.update_moderation_state()
        return registration

    @pytest.fixture
    def unmoderated_registration(self):
        registration = factories.RegistrationFactory(is_public=True)
        # Move to implicit ACCEPTED state
        registration.update_moderation_state()
        return registration

    def test_force_retraction_changes_state(self, moderated_registration, moderator):
        # moderator_initiated skips approvals: retraction lands in APPROVED
        # and the registration goes straight to WITHDRAWN.
        moderated_registration.retract_registration(
            user=moderator, justification='because', moderator_initiated=True)
        moderated_registration.refresh_from_db()
        assert moderated_registration.is_retracted
        assert moderated_registration.retraction.approval_stage is ApprovalStates.APPROVED
        assert moderated_registration.moderation_state == RegistrationModerationStates.WITHDRAWN.db_name

    def test_force_retraction_writes_action(self, moderated_registration, moderator):
        # A FORCE_WITHDRAW RegistrationAction is recorded with an annotated
        # justification string.
        justification = 'because power'
        moderated_registration.retract_registration(
            user=moderator, justification=justification, moderator_initiated=True)
        expected_justification = 'Force withdrawn by moderator: ' + justification
        assert moderated_registration.retraction.justification == expected_justification
        action = RegistrationAction.objects.last()
        assert action.trigger == RegistrationModerationTriggers.FORCE_WITHDRAW.db_name
        assert action.comment == expected_justification
        assert action.from_state == RegistrationModerationStates.ACCEPTED.db_name
        assert action.to_state == RegistrationModerationStates.WITHDRAWN.db_name

    def test_cannot_force_retraction_on_unmoderated_registration(self):
        unmoderated_registration = factories.RegistrationFactory(is_public=True)
        with assert_raises(ValueError):
            unmoderated_registration.retract_registration(
                user=unmoderated_registration.creator, justification='', moderator_initiated=True)

    def test_nonmoderator_cannot_force_retraction(self, moderated_registration):
        # Denied attempts must leave no retraction object and no state change.
        with assert_raises(PermissionsError):
            moderated_registration.retract_registration(
                user=moderated_registration.creator, justification='', moderator_initiated=True)
        assert moderated_registration.retraction is None
        assert moderated_registration.moderation_state == RegistrationModerationStates.ACCEPTED.db_name
| mfraezz/osf.io | osf_tests/test_registrations.py | Python | apache-2.0 | 37,104 |
from .base import *

# Production overrides on top of the shared base settings.
# Deployment checklist:
# https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
DEBUG = False
# TODO: Separate SECRET_KEY for production! | emmceemoore/ospi-website | project/settings/production.py | Python | mit | 215 |
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Visualize the system cells and MPI domains. Run ESPResSo in parallel
to color particles by node.
"""
import espressomd
from espressomd.visualization_opengl import openGLLive
import numpy as np
required_features = ["LENNARD_JONES"]
espressomd.assert_features(required_features)
box = [40, 30, 20]
system = espressomd.System(box_l=box)
system.set_random_state_PRNG()

# Live OpenGL view: color each particle by the MPI node that owns it and
# draw the node and cell-system boundaries.
visualizer = openGLLive(
    system,
    window_size=[800, 800],
    background_color=[0, 0, 0],
    camera_position=[20, 15, 80],
    particle_coloring='node',
    draw_nodes=True,
    draw_cells=True)

system.time_step = 0.0005
system.cell_system.set_domain_decomposition(use_verlet_lists=True)
system.cell_system.skin = 0.4

# Random initial positions; `box * np.random.random(3)` relies on numpy
# broadcasting (elementwise list * array) to scale into the box.
for i in range(100):
    system.part.add(pos=box * np.random.random(3))

system.non_bonded_inter[0, 0].lennard_jones.set_params(
    epsilon=100.0, sigma=1.0, cutoff=3.0, shift="auto")

# Relax overlaps from random placement before thermalizing.
energy = system.analysis.energy()
print("Before Minimization: E_total = {}".format(energy['total']))
system.minimize_energy.init(
    f_max=50, gamma=30.0, max_steps=10000, max_displacement=0.001)
system.minimize_energy.minimize()
energy = system.analysis.energy()
print("After Minimization: E_total = {}".format(energy['total']))

print("Tune skin")
system.cell_system.tune_skin(0.1, 4.0, 1e-1, 1000)
print(system.cell_system.get_state())

system.thermostat.set_langevin(kT=1, gamma=1, seed=42)
visualizer.run(1)
| psci2195/espresso-ffans | samples/visualization_cellsystem.py | Python | gpl-3.0 | 2,111 |
#!/usr/bin/python
"""Migrate a system from the old /etc/make.profile symlink to the new
/etc/portage/make.profile/parent profile layout, picking arch/build from
ACCEPT_KEYWORDS and flavor from the old symlink target.

Exit codes: 0 = upgraded (or already upgraded), 1 = could not determine
the configuration, 2 = I/O error while writing the new profile.
"""
import portage, os, sys

kw = portage.settings["ACCEPT_KEYWORDS"].split()
root = portage.settings["ROOT"]
# BUGFIX: the second argument to os.path.join must be relative; an absolute
# component ("/etc/...") makes join() discard `root`, so a non-"/" ROOT was
# silently ignored. Result is unchanged for ROOT == "/".
outfile = os.path.join(root, "etc/portage/make.profile/parent")
oldlink = os.path.join(root, "etc/make.profile")

if not os.path.lexists(oldlink) and os.path.exists(outfile):
    print("System appears upgraded to new profile system.")
    sys.exit(0)
if not os.path.lexists(oldlink):
    # BUGFIX: the original called os.readlink() unconditionally and crashed
    # with a traceback when neither the old symlink nor the new profile
    # existed; fail with a clear message instead.
    print("Couldn't find old %s symlink or new profile. Please upgrade to new system manually." % oldlink)
    sys.exit(1)

# Map ACCEPT_KEYWORDS to the new arch/build pair; the unstable ("~") keyword
# must be checked first since "~amd64" also contains "amd64" semantics.
if "~amd64" in kw:
    new_arch = "x86-64bit"
    new_build = "current"
elif "amd64" in kw:
    new_arch = "x86-64bit"
    new_build = "stable"
elif "~x86" in kw:
    new_arch = "x86-32bit"
    new_build = "current"
elif "x86" in kw:
    new_arch = "x86-32bit"
    new_build = "stable"
else:
    print("Couldn't determine system architecture and build. Please upgrade to new system manually.")
    sys.exit(1)

# Flavor comes from the last path component of the old profile symlink.
of = os.readlink(oldlink).split("/")[-1]
if of in ["server", "desktop"]:
    flavor = of
else:
    flavor = "core"

print("Detected architecture %s, build %s" % (new_arch, new_build))
try:
    if not os.path.exists(os.path.dirname(outfile)):
        os.makedirs(os.path.dirname(outfile))
    pf = open(outfile, "w")
    pf.write("gentoo:funtoo/1.0/linux-gnu/arch/%s\n" % new_arch)
    pf.write("gentoo:funtoo/1.0/linux-gnu/build/%s\n" % new_build)
    pf.write("gentoo:funtoo/1.0/linux-gnu/flavor/%s\n" % flavor)
    pf.close()
    if os.path.lexists(oldlink):
        os.unlink(oldlink)
except (IOError, OSError):
    # BUGFIX: on Python 2, os.makedirs/os.unlink raise OSError, which is not
    # an IOError; catch both so filesystem failures are reported cleanly.
    print("Encountered error when upgrading to new profile system.")
    sys.exit(2)
print("Upgraded to new profile system.")
| apinsard/funtoo-overlay | sys-apps/portage/files/pygrade.py | Python | gpl-2.0 | 1,424 |
import unittest
import unittest.mock
import pyramid.testing
class AccountControllerTests(unittest.TestCase):
    """Validation tests for the registration view model.

    AccountService.find_account_by_email is mocked so no database is needed:
    returning None simulates a new email, returning an Account simulates an
    already-registered one."""

    def test_register_validation_valid(self):
        # 3 A's of test: Arrange, Act, then Assert
        # Arrange
        from nflpool.viewmodels.register_viewmodel import RegisterViewModel
        data = {
            "first_name": "Paul",
            "last_name": "Cutler",
            "email": "[email protected]",
            "password": "Aa123456@",
            "confirm_password": "Aa123456@",
        }
        # noinspection PyTypeChecker
        vm = RegisterViewModel()
        vm.from_dict(data)
        # Act
        target = "nflpool.services.account_service.AccountService.find_account_by_email"
        with unittest.mock.patch(target, return_value=None):
            vm.validate()
        # Assert: no validation error for a well-formed, unused registration
        self.assertIsNone(vm.error)

    def test_register_validation_existing_user(self):
        # Arrange
        from nflpool.viewmodels.register_viewmodel import RegisterViewModel
        from nflpool.data.account import Account
        data = {
            "first_name": "Paul",
            "last_name": "Cutler",
            "email": "[email protected]",
            "password": "Aa123456@",
            "confirm_password": "Aa123456@",
        }
        # noinspection PyTypeChecker
        vm = RegisterViewModel()
        vm.from_dict(data)
        # Act: an existing Account for this email must trigger an error
        target = "nflpool.services.account_service.AccountService.find_account_by_email"
        with unittest.mock.patch(target, return_value=Account()):
            vm.validate()
        # Assert:
        self.assertIsNotNone(vm.error)
        self.assertTrue("exist" in vm.error)

    def test_register_validation_no_password(self):
        # Arrange
        from nflpool.viewmodels.register_viewmodel import RegisterViewModel
        data = {
            "first_name": "Paul",
            "last_name": "Cutler",
            "email": "[email protected]",
            "password": "",
            "confirm_password": "",
        }
        # noinspection PyTypeChecker
        vm = RegisterViewModel()
        vm.from_dict(data)
        # Act: empty password fails before any account lookup, so no mock
        vm.validate()
        # Assert:
        self.assertIsNotNone(vm.error)
        self.assertTrue("password" in vm.error)

    def test_register_validation_no_email(self):
        # Arrange
        from nflpool.viewmodels.register_viewmodel import RegisterViewModel
        data = {
            "first_name": "Paul",
            "last_name": "Cutler",
            "email": "",
            "password": "Aa123456@",
            "confirm_password": "Aa123456@",
        }
        # noinspection PyTypeChecker
        vm = RegisterViewModel()
        vm.from_dict(data)
        # Act: empty email fails before any account lookup, so no mock
        vm.validate()
        # Assert:
        self.assertIsNotNone(vm.error)
        self.assertTrue("email" in vm.error)
| prcutler/nflpool | tests/services/test_account_service.py | Python | mit | 2,875 |
"""
WSGI config for techfest project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "techfest.settings"
# Fall back to the project's settings if DJANGO_SETTINGS_MODULE is unset.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "techfest.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()  # module-level WSGI entry point
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| sheeshmohsin/techfest_starter | techfest/techfest/wsgi.py | Python | mit | 1,425 |
import os
import toolz as tz
from collections import namedtuple
from bcbio import utils
from bcbio.pipeline import datadict as dd
from bcbio.pipeline import config_utils
from bcbio.log import logger
from bcbio.distributed.transaction import file_transaction
from bcbio.provenance import do
from bcbio import bam
from bcbio.rnaseq import gtf
from bcbio.heterogeneity import chromhacks
# ranges taken from Buenrostro, Nat. Methods 10, 1213–1218 (2013).
# Insert-size windows in bp: NF = nucleosome-free, MN/DN/TN =
# mono-/di-/tri-nucleosome fractions (see citation above).
ATACRange = namedtuple('ATACRange', ['label', 'min', 'max'])
ATACRanges = {"NF": ATACRange("NF", 0, 100),
              "MN": ATACRange("MN", 180, 247),
              "DN": ATACRange("DN", 315, 473),
              "TN": ATACRange("TN", 558, 615)}
def calculate_complexity_metrics(work_bam, data):
    """
    Compute ENCODE-style library complexity counts for an ATAC-seq sample and
    record the CSV path under data['atac']['complexity_metrics_file'].

    the work_bam should have duplicates marked but not removed
    mitochondrial reads should be removed
    """
    bedtools = config_utils.get_program("bedtools", dd.get_config(data))
    work_dir = dd.get_work_dir(data)
    metrics_dir = os.path.join(work_dir, "metrics", "atac")
    utils.safe_makedir(metrics_dir)
    metrics_file = os.path.join(metrics_dir,
                                f"{dd.get_sample_name(data)}-atac-metrics.csv")
    # complexity metrics only make sense for paired-end reads
    if not bam.is_paired(work_bam):
        return data
    # reuse cached metrics from a previous run
    if utils.file_exists(metrics_file):
        data = tz.assoc_in(data, ['atac', 'complexity_metrics_file'], metrics_file)
        return data
    # BAM file must be sorted by read name (bamtobed -bedpe pairs mates)
    work_bam = bam.sort(work_bam, dd.get_config(data), order="queryname")
    with file_transaction(metrics_file) as tx_metrics_file:
        with open(tx_metrics_file, "w") as out_handle:
            # mt = total pairs, m0 = distinct positions, m1 = positions seen
            # once, m2 = positions seen twice (inputs for NRF/PBC1/PBC2)
            out_handle.write("mt,m0,m1,m2\n")
        cmd = (f"{bedtools} bamtobed -bedpe -i {work_bam} | "
               "awk 'BEGIN{OFS=\"\\t\"}{print $1,$2,$4,$6,$9,$10}' | "
               "sort | "
               "uniq -c | "
               "awk 'BEGIN{mt=0;m0=0;m1=0;m2=0}($1==1){m1=m1+1} "
               "($1==2){m2=m2+1}{m0=m0+1}{mt=mt+$1}END{printf \"%d,%d,%d,%d\\n\", mt,m0,m1,m2}' >> "
               f"{tx_metrics_file}")
        message = f"Calculating ATAC-seq complexity metrics on {work_bam}, saving as {metrics_file}."
        do.run(cmd, message)
    data = tz.assoc_in(data, ['atac', 'complexity_metrics_file'], metrics_file)
    return data
def calculate_encode_complexity_metrics(data):
    """Derive ENCODE library-complexity metrics (NRF, PBC1, PBC2) and their
    qualitative flags from the counts written by calculate_complexity_metrics.
    Returns {} when no metrics file has been recorded for this sample."""
    metrics_file = tz.get_in(['atac', 'complexity_metrics_file'], data, None)
    if not metrics_file:
        return {}
    else:
        # single header line followed by a single line of integer counts
        with open(metrics_file) as in_handle:
            header = next(in_handle).strip().split(",")
            values = next(in_handle).strip().split(",")
        raw_metrics = {h: int(v) for h, v in zip(header, values)}
    # PBC1 = m1/m0, NRF = m0/mt; PBC2 = m1/m2, guarded against m2 == 0
    metrics = {"PBC1": raw_metrics["m1"] / raw_metrics["m0"],
               "NRF": raw_metrics["m0"] / raw_metrics["mt"]}
    if raw_metrics["m2"] == 0:
        PBC2 = 0
    else:
        PBC2 = raw_metrics["m1"] / raw_metrics["m2"]
    metrics["PBC2"] = PBC2
    metrics["bottlenecking"] = get_bottlenecking_flag(metrics["PBC1"], metrics["PBC2"])
    metrics["complexity"] = get_complexity_flag(metrics["NRF"])
    return(metrics)
def get_bottlenecking_flag(PBC1, PBC2):
    """Map PCR bottlenecking coefficients to a severity label.

    Returns "severe" when either coefficient is below the severe cutoff,
    "moderate" when either is at or below the moderate cutoff, otherwise
    "none"."""
    is_severe = PBC1 < 0.7 or PBC2 < 1
    is_moderate = PBC1 <= 0.9 or PBC2 <= 3
    if is_severe:
        return "severe"
    if is_moderate:
        return "moderate"
    return "none"
def get_complexity_flag(NRF):
    """Classify the non-redundant fraction (NRF) of a library as
    "concerning" (< 0.7), "acceptable" (< 0.9) or "ideal" (>= 0.9)."""
    for threshold, label in ((0.7, "concerning"), (0.9, "acceptable")):
        if NRF < threshold:
            return label
    return "ideal"
def split_ATAC(data, bam_file=None):
    """
    splits a BAM into nucleosome-free (NF) and mono/di/tri nucleosome BAMs based
    on the estimated insert sizes
    uses the current working BAM file if no BAM file is supplied
    """
    sambamba = config_utils.get_program("sambamba", data)
    num_cores = dd.get_num_cores(data)
    base_cmd = f'{sambamba} view --format bam --nthreads {num_cores} '
    bam_file = bam_file if bam_file else dd.get_work_bam(data)
    out_stem = os.path.splitext(bam_file)[0]
    split_files = {}
    # we can only split these fractions from paired runs
    if not bam.is_paired(bam_file):
        split_files["full"] = bam_file
        data = tz.assoc_in(data, ['atac', 'align'], split_files)
        return data
    # one filtered BAM per insert-size window defined in ATACRanges
    for arange in ATACRanges.values():
        out_file = f"{out_stem}-{arange.label}.bam"
        if not utils.file_exists(out_file):
            with file_transaction(out_file) as tx_out_file:
                # filter on template_length (insert size) within the window
                cmd = base_cmd +\
                    f'-F "template_length > {arange.min} and template_length < {arange.max}" ' +\
                    f'{bam_file} > {tx_out_file}'
                message = f'Splitting {arange.label} regions from {bam_file}.'
                do.run(cmd, message)
            bam.index(out_file, dd.get_config(data))
        split_files[arange.label] = out_file
    split_files["full"] = bam_file
    data = tz.assoc_in(data, ['atac', 'align'], split_files)
    return data
def run_ataqv(data):
    """Run the ataqv QC tool on an ATAC-seq sample.

    Returns the path to the gzipped JSON metrics file, or None when the
    sample is not ATAC-seq, required inputs are missing, or ataqv is not
    installed. Results are cached between runs."""
    if not dd.get_chip_method(data) == "atac":
        return None
    work_dir = dd.get_work_dir(data)
    sample_name = dd.get_sample_name(data)
    out_dir = os.path.join(work_dir, "qc", sample_name, "ataqv")
    peak_file = get_full_peaks(data)
    bam_file = get_unfiltered_bam(data)
    out_file = os.path.join(out_dir, sample_name + ".ataqv.json.gz")
    if not peak_file:
        logger.info(f"Full peak file for {sample_name} not found, skipping ataqv")
        return None
    if not bam_file:
        logger.info(f"Unfiltered BAM file for {sample_name} not found, skipping ataqv")
        return None
    if utils.file_exists(out_file):
        return out_file
    tss_bed_file = os.path.join(out_dir, "TSS.bed")
    tss_bed_file = gtf.get_tss_bed(dd.get_gtf_file(data), tss_bed_file, data, padding=0)
    # human/mouse autosomes are built into ataqv; any other organism needs an
    # explicit autosomal reference file
    if chromhacks.is_human(data):
        organism = "human"
        autosomal_reference_flag = ""
    elif chromhacks.is_mouse(data):
        organism = "mouse"
        autosomal_reference_flag = ""
    else:
        autosomal_reference = os.path.join(out_dir, "autosomal.txt")
        autosomal_reference = _make_autosomal_reference_file(autosomal_reference, data)
        organism = "None"
        autosomal_reference_flag = f"--autosomal-reference-file {autosomal_reference} "
    ataqv = config_utils.get_program("ataqv", data)
    mitoname = chromhacks.get_mitochondrial_chroms(data)[0]
    if not ataqv:
        logger.info(f"ataqv executable not found, skipping running ataqv.")
        return None
    with file_transaction(out_file) as tx_out_file:
        cmd = (f"{ataqv} --peak-file {peak_file} --name {sample_name} --metrics-file {tx_out_file} "
               f"--tss-file {tss_bed_file} {autosomal_reference_flag} "
               f"--ignore-read-groups --mitochondrial-reference-name {mitoname} "
               f"--tss-extension 1000 "
               f"{organism} {bam_file}")
        message = f"Running ataqv on {sample_name}."
        do.run(cmd, message)
    return out_file
def _make_autosomal_reference_file(out_file, data):
    """
    for many organisms we don't know in bcbio what chromosomes are what, for now include
    everything non-mitochondrial

    Writes one chromosome name per line to out_file (cached if it already
    exists) and returns the path.
    """
    if utils.file_exists(out_file):
        return out_file
    nonmito = chromhacks.get_nonmitochondrial_chroms(data)
    with file_transaction(out_file) as tx_out_file:
        with open(tx_out_file, "w") as out_handle:
            for chrom in nonmito:
                print(f"{chrom}", file=out_handle)
    return out_file
def get_NF_bam(data):
    """Return the nucleosome-free (NF) BAM for an ATAC-seq sample, or None
    when no split alignment has been recorded."""
    nf_path = ("atac", "align", "NF")
    return tz.get_in(nf_path, data, None)
def get_NF_peaks(data):
    """Return the nucleosome-free (NF) macs2 peak file (narrowPeak or
    broadPeak) for an ATAC-seq sample, or None when not available."""
    peak_files = tz.get_in(("peaks_files", "NF", "macs2"), data, [])
    matches = (f for f in peak_files
               if f.endswith(("narrowPeak", "broadPeak")))
    return next(matches, None)
def get_unfiltered_bam(data):
    """
    get the unfiltered BAM file for a chipseq/ATAC-seq sample if it exists
    (the original docstring wrongly described this as the nucleosome-free BAM)
    """
    return tz.get_in(("chipseq", "align", "unfiltered"), data, None)
def get_full_peaks(data):
    """Return the full-fraction macs2 peak file (narrowPeak or broadPeak)
    for a sample, or None when not available."""
    peak_files = tz.get_in(("peaks_files", "full", "macs2"), data, [])
    matches = (f for f in peak_files
               if f.endswith(("narrowPeak", "broadPeak")))
    return next(matches, None)
def create_ataqv_report(samples):
    """
    make the ataqv report from a set of ATAC-seq samples

    Builds the mkarv HTML report once (cached on the index.html sentinel) and
    attaches its location to every sample under data["ataqv_report"].
    Returns the samples unchanged when no per-sample ataqv output exists.

    Improvements over the original: removes the unused `new_samples` variable
    and merges the duplicated attach-report-to-samples code paths into one.
    """
    data = samples[0][0]
    reportdir = os.path.join(dd.get_work_dir(data), "qc", "ataqv")
    sentinel = os.path.join(reportdir, "index.html")
    if not utils.file_exists(sentinel):
        # collect per-sample ataqv JSON outputs recorded in the QC summary
        mkarv = config_utils.get_program("mkarv", dd.get_config(data))
        ataqv_files = []
        for data in dd.sample_data_iterator(samples):
            qc = dd.get_summary_qc(data)
            ataqv_file = tz.get_in(("ataqv", "base"), qc, None)
            if ataqv_file and utils.file_exists(ataqv_file):
                ataqv_files.append(ataqv_file)
        if not ataqv_files:
            return samples
        ataqv_json_file_string = " ".join(ataqv_files)
        with file_transaction(reportdir) as txreportdir:
            cmd = f"{mkarv} {txreportdir} {ataqv_json_file_string}"
            message = f"Creating ataqv report from {ataqv_json_file_string}."
            do.run(cmd, message)
    # attach the (new or cached) report location to every sample
    ataqv_output = {"base": sentinel, "secondary": get_ataqv_report_files(reportdir)}
    new_data = []
    for data in dd.sample_data_iterator(samples):
        data = tz.assoc_in(data, ["ataqv_report"], ataqv_output)
        new_data.append(data)
    return dd.get_samples_from_datalist(new_data)
def get_ataqv_report_files(reportdir):
    """Recursively collect all existing files under reportdir.

    Rewritten to stop shadowing the builtin ``file`` and to stop rebinding
    the walk tuple's file-list variable (``f``) as the joined path, which
    made the original loop needlessly confusing.
    """
    found = []
    for dirpath, _dirnames, filenames in os.walk(reportdir):
        for filename in filenames:
            path = os.path.join(dirpath, filename)
            if utils.file_exists(path):
                found.append(path)
    return found
| vladsaveliev/bcbio-nextgen | bcbio/chipseq/atac.py | Python | mit | 10,313 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
# Public package metadata; __all__ makes the dunder names importable via *.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "cryptography_polarssl"
# NOTE: adjacent string literals concatenate with no separator, so each
# fragment must carry its own trailing space. The original dropped the space
# after "based on", producing "based onPolarSSL." in the summary.
__summary__ = ("cryptography_polarssl is a package that provides an "
               "alternative backend implementation for cryptography based "
               "on PolarSSL.")

__uri__ = "https://github.com/Ayrx/cryptography_polarssl"

__version__ = "0.1.dev1"

__author__ = "Terry Chia"
__email__ = "[email protected]"

__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__
| Ayrx/cryptography_polarssl | cryptography_polarssl/__about__.py | Python | apache-2.0 | 1,201 |
#!/usr/bin/python2
'''
This example explores the 'input' function in python2
which is different from the 'input' function in python3.
Notes:
- 'input' in python2 does an 'eval' on the input from
the console.
- this makes this function inherently unsafe.
- this also make it unpredictable (try to put a float,
or a double or whatever).
- I don't like this function in python2.
'''
from __future__ import print_function
# Python 2's input() is eval(raw_input()): the returned type depends on what
# the user types (int, float, any expression) -- hence the demonstration.
text = input('please give me a number: ')
print('type of return from input is', type(text))
print('value returned from input is', text)
| nonZero/demos-python | src/examples/short/console/python2_input.py | Python | gpl-3.0 | 557 |
class deciderType:
"""DeciderType class"""
# class template for all future specific "deciders" to implement
def __init__(self): # class constructor
import log
self.myLog = log.Log('decider.log')
self.myLog.add('DeciderType initialized')
self.deciderName = "DeciderType"
##### sanity check variables #####
from datetime import datetime
self.startTime = datetime.now()
self.stopTime = datetime.now()
self.duration = 0
self.clockRunning = False
self.consequtive = 3 # minimum number of hits in a row before we think it's a real noise
self.minimumTime = 5 # minimum time in seconds for reporting a noise
self.maximumTime = 30 # maximum time in seconds for reporting a noise
"""
The 'listenerHistory' is based on a single moment of audio capture data.
It could 'hear' a noise based on dB level, frequency correlation or other values
The 'decisionHistory' is based on other factors or sanity checks.
For example, while the listener could think there is a noise,
a sanity check could be the fact that we wear that noise three consequtive times.
Until we 'hear' it three times in a row, we do not 'decide' there is a noise.
"""
self.listenerHistory = [] # history of the Listener hearing a noise in the moment
self.decisionHistory = [] # history of this Decider determining there is a noise after sanity checks
# these should both be the same value
self.listenerHistoryMax = 20
self.decisionHistoryMax = 20
self.correlationData = []
self.startup()
def startup(self):
# this will run immediately after the constructor
# it is here so deciderTypeXYZ can have "startup" code without overriding the entire contstructor
pass
# def __del__(self):
# self.myLog.add('DeciderType ' + self.deciderName + ' closing down [destructor called]')
def setDeciderName(self,newName):
self.deciderName = newName
def isNoise(self, theListener): # returns true if there is a noise
# return False by default
self.myLog.add("ERROR - Decider (" + self.deviceName + ") did not implement isNoise()")
return False
def sanityCheckTimer(self): # return True if passes all sanity checks, False if not
self.calculateDuration()
# check if time is greater than X seconds
if self.getDuration() < self.minimumTime:
print('Decider.sanityCheckTimer() failed minimum time check (' + str(self.getDuration()) + ' seconds)')
self.myLog.add('Decider.sanityCheckTimer() failed minimum time check (' + str(self.getDuration()) + ' seconds)')
return False
# check if time is less than Y seconds
if self.duration > self.maximumTime:
print('Decider.sanityCheckTimer() failed maximum time check (' + str(self.duration) + ' seconds)')
self.myLog.add('Decider.sanityCheckTimer() failed maximum time check (' + str(self.duration) + ' seconds)')
return False
return True # passed all tests so return True
def startTimer(self): # start the reporting clock
from datetime import datetime
self.startTime = datetime.now()
self.clockRunning = True
def stopTimer(self):
from datetime import datetime
self.stopTime = datetime.now()
self.clockRunning = False
def calculateDuration(self):
from datetime import timedelta
d = self.stopTime - self.startTime
self.duration = int(d.total_seconds())
def calculateCurrentDuration(self):
# returns the current time duration from startTime until this moment
from datetime import datetime
from datetime import timedelta
currentTime = datetime.now()
d = currentTime - self.startTime
return int(d.total_seconds())
def getDuration(self):
# if the clock is running return the difference between startTime and now
# if the clock is not running return the difference between startTime and stopTime
if self.clockRunning:
return self.calculateCurrentDuration()
else:
self.calculateDuration()
return self.duration
def addListenerHistory(self, newEntry): # adds
if newEntry:
self.listenerHistory.append(1)
else:
self.listenerHistory.append(0)
if len(self.listenerHistory) > self.listenerHistoryMax:
self.listenerHistory = self.listenerHistory[-self.listenerHistoryMax:]
def addDecisionHistory(self, newEntry): # adds
if newEntry:
self.decisionHistory.append(1)
else:
self.decisionHistory.append(0)
if len(self.decisionHistory) > self.decisionHistoryMax:
self.decisionHistory = self.decisionHistory[-self.decisionHistoryMax:]
def sanityCheckConsequtive(self): # return true if we heard noise last X times
a = 0
if self.consequtive > len(self.listenerHistory):
return False
for x in self.listenerHistory[-self.consequtive:]:
if x:
a += 1
if a == self.consequtive:
return True
else:
return False
def wasNoise(self): # return True if there was a noise last time around
if len(self.decisionHistory) > 0: # return last item in the list
if self.decisionHistory[-1] == 1:
return True
return False
def heardNoise(self): # return True if the listener heard a noise last time around
if len(self.listenerHistory) > 0: # return last item in the list
if self.listenerHistory[-1] == 1:
return True
return False
def getCorrelationData(self): # read correlation data from a file
self.correlationData = [] # first clear out the data
if self.correlationFile == '':
self.myLog.add("ERROR: correlationFile was not defined")
exit()
import os
if os.path.isfile(self.correlationFile):
file = open(self.correlationFile,'r')
for line in file:
self.correlationData.append(float(line))
file.close()
else: # doesn't exist so create it
file = open(self.correlationFile,'w')
file.write("")
file.close()
# TODO check that there was actually data in the file
if not len(self.correlationData) > 0:
self.myLog.add("ERROR: there is no data in the correlationFile...use teach.py")
def saveCorrelationData(self, newfile, myData):
# save correlation data to a File
file = open(newfile,'w') # open the file in write mode
for x in range(len(myData)):
file.write(str(myData[x]) + '\n')
file.close() # be nice and close out the file
| austinmroczek/pauser | deciderType.py | Python | mit | 7,295 |
'''
Check for primality for both decimal inputs and binary inputs
'''
# Sample values to filter for primality (Python 2 script: print statements).
lst = [2, 4, 6, 7, 9, 13, 17, 99, 127, 139]
print lst
prime = []  # collects the primes found in lst
def is_composite(n):
    """Return True when n is not prime (i.e. has a nontrivial divisor).

    Fixes versus the original:
    - n < 2 (0 and 1 are not prime) now returns True instead of False,
      which wrongly classified 0 and 1 as prime;
    - trial division stops at sqrt(n) instead of n/2 (same results, fewer
      iterations), and uses integer arithmetic valid on Python 2 and 3
      (the original's range(2, n/2 + 1) breaks under Python 3 division).
    """
    if n < 2:
        return True
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return True
    return False
# Filter lst down to its primes.
for n in lst :
    if is_composite(n) :
        continue
    else :
        prime.append(n)
print prime

# Read a number and test it for primality.
# NOTE(review): int(no, 2) parses the input as *binary* only; a decimal
# digit other than 0/1 raises ValueError -- confirm the intended behavior
# matches the "both decimal and binary" claim in the module docstring.
print "Enter number to check for Primality : "
no = raw_input()
if not is_composite(int(no, 2)):
    print "Entered number is prime"
else :
    print "Entered number is composite"
| srinivasanmit/all-in-all | puzzles/isprime.py | Python | gpl-3.0 | 548 |
# Maps each supported plaintext character (a-z, space, '?', '.') to its
# numeric code for Hill-cipher arithmetic modulo CONST_SIZE (29, a prime,
# so nonzero determinants are invertible).
dictionary = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': 5, 'g': 6, 'h': 7, 'i': 8, 'j': 9, 'k': 10, 'l': 11, 'm': 12,
              'n': 13, 'o': 14, 'p': 15, 'q': 16, 'r': 17, 's': 18, 't': 19, 'u': 20, 'v': 21, 'w': 22, 'x': 23,
              'y': 24, 'z': 25, ' ': 26, '?': 27, '.': 28}
CONST_SIZE = 29
def get_matrix(word, key_d):
    """Encode word as a numeric matrix: rows of key_d character codes
    (looked up in the module-level ``dictionary``), row-major; the last
    row may be shorter when len(word) is not a multiple of key_d."""
    return [[dictionary[ch] for ch in word[start:start + key_d]]
            for start in range(0, len(word), key_d)]
'''
Function for String Encryption
'''
def encrypt(f_key, plain_mat):
    """Hill-cipher encryption: multiply each row of plain_mat by the key
    matrix f_key, reducing every entry modulo CONST_SIZE."""
    rows = len(plain_mat)
    cols = len(plain_mat[0])
    # print (str(rows) + " " + str(cols))
    rows_key = len(f_key)
    enc_msg = []
    for it in range(0, rows):
        row_value = []
        for j in range(0, cols):
            val = 0
            # dot product of plaintext row `it` with key column `j`
            for k in range(0, rows_key):
                # print(str(plain_mat[it][j]) + " " + str(f_key[k][j]))
                val += (plain_mat[it][k] * f_key[k][j])
            row_value.append(val % CONST_SIZE)
        enc_msg.append(row_value)
    return enc_msg
'''
Function for Getting Required Vakues from Dictionary
'''
def get_val_from_dict(num):
    """Reverse lookup in the module-level ``dictionary``: return the
    character whose code is num, or '' when no such code exists."""
    return next((character for character, code in dictionary.items()
                 if code == num), '')
def get_string(matrix):
    """Decode a numeric matrix back into a string, reading the cells in
    row-major order (assumes a rectangular matrix)."""
    rows = len(matrix)
    cols = len(matrix[0])
    return "".join(get_val_from_dict(matrix[r][c])
                   for r in range(rows) for c in range(cols))
# Decryption Code Starts Here
def initialize(l):
    """Return an l x l matrix (list of independent row lists) of zeros."""
    return [[0] * l for _ in range(l)]
def egcd(a, b):
    # Extended Euclidean algorithm (recursive): returns (g, x, y) such that
    # g == gcd(a, b) and a*x + b*y == g.
    if a == 0:
        return b, 0, 1
    else:
        # note the swapped (y, x) unpacking: coefficients are rewound as the
        # recursion unwinds
        g, y, x = egcd(b % a, a)
        return g, x - (b // a) * y, y
def modinv(a, m):
    # Modular multiplicative inverse of a mod m via the extended Euclidean
    # algorithm; raises Exception when gcd(a, m) != 1 (no inverse exists).
    g, x, y = egcd(a, m)
    if g != 1:
        raise Exception('modular inverse does not exist')
    else:
        return x % m
def get_co_factor(mat, p, q, n):
    """Return the minor of the n x n matrix mat: a copy with row p and
    column q removed.

    The original special-cased q == 0, but mat[i][0:0] + mat[i][1:n] equals
    mat[i][1:n], so a single slice expression covers every column.
    """
    minor = []
    for i in range(0, n):
        if i != p:
            minor.append(mat[i][:q] + mat[i][q + 1:n])
    return minor
def determinant(matrix, size):
    # Determinant by recursive Laplace expansion along the first row,
    # alternating cofactor signs. O(size!) -- fine for small key matrices.
    val = 0
    if size == 1:
        return matrix[0][0]
    sign = 1
    for i in range(0, size):
        temp = get_co_factor(matrix, 0, i, size)
        val += sign * matrix[0][i] * determinant(temp, size - 1)
        sign = -sign
    return val
def ad_joint(matrix):
    # Adjugate (transpose of the cofactor matrix) of a square matrix.
    rows = len(matrix)
    cols = len(matrix[0])
    adj = []
    # 1x1 matrix: adjugate is [[1]] by convention
    if rows == 1 and cols == 1:
        adj.append([1])
        return adj
    adj = initialize(rows)
    for i in range(0, rows):
        for j in range(0, cols):
            temp = get_co_factor(matrix, i, j, rows)
            if (i + j) % 2 == 0:
                sign = 1
            else:
                sign = -1
            # note the [j][i] index: the transposition happens here
            adj[j][i] = sign * determinant(temp, cols - 1)
    return adj
def inverse_of_matrix(adjoint, size, det):
    """Scale the adjugate by ``det`` and reduce modulo CONST_SIZE.

    ``det`` is expected to be the modular inverse of the determinant, so
    the result is the modular inverse of the original matrix.
    """
    return [
        [(det * adjoint[r][c]) % CONST_SIZE for c in range(size)]
        for r in range(size)
    ]
def get_inverse(given_mat, size):
    # Returns the modular inverse of `given_mat` (mod CONST_SIZE) via the
    # adjugate formula: inv(A) = det(A)^-1 * adj(A)  (all mod CONST_SIZE).
    adjoint_of_mat = ad_joint(given_mat)
    # print(adjoint_of_mat)
    determinant_mat = determinant(given_mat, size)
    # print(determinant_mat % CONST_SIZE)
    # modinv raises if the determinant is not coprime with CONST_SIZE,
    # i.e. the key matrix is not invertible and cannot be used.
    mul_inverse_tuple = modinv(determinant_mat % CONST_SIZE, CONST_SIZE)
    inverse_mat = inverse_of_matrix(adjoint_of_mat, size, mul_inverse_tuple)
    # print("Inverse of Matrix")
    return inverse_mat
def decrypt(f_key, plain_mat):
    """Hill-cipher decryption step.

    Mathematically identical to encrypt() - the matrix product
    ``plain_mat x f_key`` mod CONST_SIZE - but callers pass the inverse
    key, which turns the operation into decryption.
    """
    width = len(plain_mat[0])
    depth = len(f_key)
    plain = []
    for row in plain_mat:
        out_row = []
        for col in range(width):
            acc = 0
            for k in range(depth):
                acc += row[k] * f_key[k][col]
            out_row.append(acc % CONST_SIZE)
        plain.append(out_row)
    return plain
# Main Code Starts Here
# Get the size of matrix
key_size = int(input())
key = []
# Get the matrix From Input (one row of space-separated ints per line)
for itr in range(0, key_size):
    r_vals = list(map(int, input().split()))
    key.append(r_vals)
# Precompute the inverse key once; raises if the key is not invertible.
inverse_key = get_inverse(key, key_size)
# print(inverse_key)
# print(key)
# Number of queries to process.
t = int(input())
while t > 0:
    line = input()
    # e stands for encryption See INPUT Format Below
    if line[0] == 'e':
        plaintext = line[2:]
        # Pad with 'x' so the length is a multiple of the key size.
        while len(plaintext) % key_size != 0:
            plaintext += 'x'
        # print(plaintext)
        # get_matrix is defined earlier in this file (outside this view);
        # presumably it maps text to a numeric matrix via `dictionary`.
        plain_matrix = get_matrix(plaintext, key_size)
        # print(plain_matrix)
        encrypted_value = encrypt(key, plain_matrix)
        # print(encrypted_value)
        encryped_string = get_string(encrypted_value)
        print("Encrypted String:" + encryped_string)
    # d stands for decryption See INPUT Format Below
    elif line[0] == 'd':
        ciphertext = line[2:]
        cipher_matrix = get_matrix(ciphertext, key_size)
        # print(cipher_matrix)
        decrypted_value = decrypt(inverse_key, cipher_matrix)
        decrypted_string = get_string(decrypted_value)
        # print(decrypted_value)
        print("Decrypted String: " + decrypted_string)
    t -= 1
'''
3
21 18 19
17 3 7
5 2 28
2
e hi. how are you?
d rdqkyhfkuewgbvmavn'
''' | hacktoberfest17/programming | encryption_algorithms/python/Hill_Cipher_Encryption_Decryption.py | Python | gpl-3.0 | 5,908 |
import re
from django import forms
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.forms import formsets, ValidationError
from django.views.generic import TemplateView
from django.utils.datastructures import SortedDict
from django.utils.decorators import classonlymethod
from django.contrib.formtools.wizard.storage import get_storage
from django.contrib.formtools.wizard.storage.exceptions import NoFileStorageConfigured
from django.contrib.formtools.wizard.forms import ManagementForm
def normalize_name(name):
    """Convert a CamelCase class name to snake_case.

    An underscore is inserted before every uppercase letter that starts a
    new word, then the result is lowercased and stripped of any leading /
    trailing underscores.
    """
    snake = re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', '_\\1', name)
    return snake.lower().strip('_')
class StepsHelper(object):
    """Read-only view over a wizard's steps (names, count, current/next/
    prev, indices); exposed to templates as ``wizard.steps``."""
    def __init__(self, wizard):
        self._wizard = wizard
    def __dir__(self):
        return self.all
    def __len__(self):
        return self.count
    def __repr__(self):
        return '<StepsHelper for %s (steps: %s)>' % (self._wizard, self.all)
    @property
    def all(self):
        # NOTE: under Python 2, SortedDict.keys() returns a list, so the
        # indexing done by `first`/`last` below is safe.
        "Returns the names of all steps/forms."
        return self._wizard.get_form_list().keys()
    @property
    def count(self):
        "Returns the total number of steps/forms in this wizard."
        return len(self.all)
    @property
    def current(self):
        """
        Returns the current step. If no current step is stored in the
        storage backend, the first step will be returned.
        """
        return self._wizard.storage.current_step or self.first
    @property
    def first(self):
        "Returns the name of the first step."
        return self.all[0]
    @property
    def last(self):
        "Returns the name of the last step."
        return self.all[-1]
    @property
    def next(self):
        "Returns the next step."
        return self._wizard.get_next_step()
    @property
    def prev(self):
        "Returns the previous step."
        return self._wizard.get_prev_step()
    @property
    def index(self):
        "Returns the index for the current step."
        return self._wizard.get_step_index()
    @property
    def step0(self):
        # Zero-based step number, for templates.
        return int(self.index)
    @property
    def step1(self):
        # One-based step number, for "Step 2 of 5"-style display.
        return int(self.index) + 1
class WizardView(TemplateView):
    """
    The WizardView is used to create multi-page forms and handles all the
    storage and validation stuff. The wizard is based on Django's generic
    class based views.
    """
    storage_name = None
    form_list = None
    initial_dict = None
    instance_dict = None
    condition_dict = None
    template_name = 'formtools/wizard/wizard_form.html'
    def __repr__(self):
        return '<%s: forms: %s>' % (self.__class__.__name__, self.form_list)
    @classonlymethod
    def as_view(cls, *args, **kwargs):
        """
        This method is used within urls.py to create unique formwizard
        instances for every request. We need to override this method because
        we add some kwargs which are needed to make the formwizard usable.
        """
        initkwargs = cls.get_initkwargs(*args, **kwargs)
        return super(WizardView, cls).as_view(**initkwargs)
    @classmethod
    def get_initkwargs(cls, form_list, initial_dict=None,
            instance_dict=None, condition_dict=None, *args, **kwargs):
        """
        Creates a dict with all needed parameters for the form wizard instances.
        * `form_list` - is a list of forms. The list entries can be single form
          classes or tuples of (`step_name`, `form_class`). If you pass a list
          of forms, the formwizard will convert the class list to
          (`zero_based_counter`, `form_class`). This is needed to access the
          form for a specific step.
        * `initial_dict` - contains a dictionary of initial data dictionaries.
          The key should be equal to the `step_name` in the `form_list` (or
          the str of the zero based counter - if no step_names added in the
          `form_list`)
        * `instance_dict` - contains a dictionary of instance objects. This list
          is only used when `ModelForm`s are used. The key should be equal to
          the `step_name` in the `form_list`. Same rules as for `initial_dict`
          apply.
        * `condition_dict` - contains a dictionary of boolean values or
          callables. If the value of for a specific `step_name` is callable it
          will be called with the formwizard instance as the only argument.
          If the return value is true, the step's form will be used.
        """
        kwargs.update({
            'initial_dict': initial_dict or {},
            'instance_dict': instance_dict or {},
            'condition_dict': condition_dict or {},
        })
        init_form_list = SortedDict()
        assert len(form_list) > 0, 'at least one form is needed'
        # walk through the passed form list
        for i, form in enumerate(form_list):
            if isinstance(form, (list, tuple)):
                # if the element is a tuple, add the tuple to the new created
                # sorted dictionary.
                init_form_list[unicode(form[0])] = form[1]
            else:
                # if not, add the form with a zero based counter as unicode
                init_form_list[unicode(i)] = form
        # walk through the newly created list of forms
        for form in init_form_list.itervalues():
            if issubclass(form, formsets.BaseFormSet):
                # if the element is based on BaseFormSet (FormSet/ModelFormSet)
                # we need to override the form variable.
                form = form.form
            # check if any form contains a FileField, if yes, we need a
            # file_storage added to the formwizard (by subclassing).
            for field in form.base_fields.itervalues():
                if (isinstance(field, forms.FileField) and
                        not hasattr(cls, 'file_storage')):
                    raise NoFileStorageConfigured
        # build the kwargs for the formwizard instances
        kwargs['form_list'] = init_form_list
        return kwargs
    def get_wizard_name(self):
        # Snake-cased class name; used as the default storage prefix.
        return normalize_name(self.__class__.__name__)
    def get_prefix(self):
        # TODO: Add some kind of unique id to prefix
        return self.wizard_name
    def get_form_list(self):
        """
        This method returns a form_list based on the initial form list but
        checks if there is a condition method/value in the condition_list.
        If an entry exists in the condition list, it will call/read the value
        and respect the result. (True means add the form, False means ignore
        the form)
        The form_list is always generated on the fly because condition methods
        could use data from other (maybe previous forms).
        """
        form_list = SortedDict()
        for form_key, form_class in self.form_list.iteritems():
            # try to fetch the value from condition list, by default, the form
            # gets passed to the new list.
            condition = self.condition_dict.get(form_key, True)
            if callable(condition):
                # call the value if needed, passes the current instance.
                condition = condition(self)
            if condition:
                form_list[form_key] = form_class
        return form_list
    def dispatch(self, request, *args, **kwargs):
        """
        This method gets called by the routing engine. The first argument is
        `request` which contains a `HttpRequest` instance.
        The request is stored in `self.request` for later use. The storage
        instance is stored in `self.storage`.
        After processing the request using the `dispatch` method, the
        response gets updated by the storage engine (for example add cookies).
        """
        # add the storage engine to the current formwizard instance
        self.wizard_name = self.get_wizard_name()
        self.prefix = self.get_prefix()
        self.storage = get_storage(self.storage_name, self.prefix, request,
            getattr(self, 'file_storage', None))
        self.steps = StepsHelper(self)
        response = super(WizardView, self).dispatch(request, *args, **kwargs)
        # update the response (e.g. adding cookies)
        self.storage.update_response(response)
        return response
    def get(self, request, *args, **kwargs):
        """
        This method handles GET requests.
        If a GET request reaches this point, the wizard assumes that the user
        just starts at the first step or wants to restart the process.
        The data of the wizard will be reset before rendering the first step.
        """
        self.storage.reset()
        # reset the current step to the first step.
        self.storage.current_step = self.steps.first
        return self.render(self.get_form())
    def post(self, *args, **kwargs):
        """
        This method handles POST requests.
        The wizard will render either the current step (if form validation
        wasn't successful), the next step (if the current step was stored
        successful) or the done view (if no more steps are available)
        """
        # Look for a wizard_prev_step element in the posted data which
        # contains a valid step name. If one was found, render the requested
        # form. (This makes stepping back a lot easier).
        wizard_prev_step = self.request.POST.get('wizard_prev_step', None)
        if wizard_prev_step and wizard_prev_step in self.get_form_list():
            self.storage.current_step = wizard_prev_step
            form = self.get_form(
                data=self.storage.get_step_data(self.steps.current),
                files=self.storage.get_step_files(self.steps.current))
            return self.render(form)
        # Check if form was refreshed
        management_form = ManagementForm(self.request.POST, prefix=self.prefix)
        if not management_form.is_valid():
            raise ValidationError(
                'ManagementForm data is missing or has been tampered.')
        form_current_step = management_form.cleaned_data['current_step']
        if (form_current_step != self.steps.current and
                self.storage.current_step is not None):
            # form refreshed, change current step
            self.storage.current_step = form_current_step
        # get the form for the current step
        form = self.get_form(data=self.request.POST, files=self.request.FILES)
        # and try to validate
        if form.is_valid():
            # if the form is valid, store the cleaned data and files.
            self.storage.set_step_data(self.steps.current, self.process_step(form))
            self.storage.set_step_files(self.steps.current, self.process_step_files(form))
            # check if the current step is the last step
            if self.steps.current == self.steps.last:
                # no more steps, render done view
                return self.render_done(form, **kwargs)
            else:
                # proceed to the next step
                return self.render_next_step(form)
        return self.render(form)
    def render_next_step(self, form, **kwargs):
        """
        This method gets called when the next step/form should be rendered.
        `form` contains the last/current form.
        """
        # get the form instance based on the data from the storage backend
        # (if available).
        next_step = self.steps.next
        new_form = self.get_form(next_step,
            data=self.storage.get_step_data(next_step),
            files=self.storage.get_step_files(next_step))
        # change the stored current step
        self.storage.current_step = next_step
        return self.render(new_form, **kwargs)
    def render_done(self, form, **kwargs):
        """
        This method gets called when all forms passed. The method should also
        re-validate all steps to prevent manipulation. If any form doesn't
        validate, `render_revalidation_failure` should get called.
        If everything is fine call `done`.
        """
        final_form_list = []
        # walk through the form list and try to validate the data again.
        for form_key in self.get_form_list():
            form_obj = self.get_form(step=form_key,
                data=self.storage.get_step_data(form_key),
                files=self.storage.get_step_files(form_key))
            if not form_obj.is_valid():
                return self.render_revalidation_failure(form_key, form_obj, **kwargs)
            final_form_list.append(form_obj)
        # render the done view and reset the wizard before returning the
        # response. This is needed to prevent from rendering done with the
        # same data twice.
        done_response = self.done(final_form_list, **kwargs)
        self.storage.reset()
        return done_response
    def get_form_prefix(self, step=None, form=None):
        """
        Returns the prefix which will be used when calling the actual form for
        the given step. `step` contains the step-name, `form` the form which
        will be called with the returned prefix.
        If no step is given, the form_prefix will determine the current step
        automatically.
        """
        if step is None:
            step = self.steps.current
        return str(step)
    def get_form_initial(self, step):
        """
        Returns a dictionary which will be passed to the form for `step`
        as `initial`. If no initial data was provided while initializing the
        form wizard, an empty dictionary will be returned.
        """
        return self.initial_dict.get(step, {})
    def get_form_instance(self, step):
        """
        Returns a object which will be passed to the form for `step`
        as `instance`. If no instance object was provided while initializing
        the form wizard, None will be returned.
        """
        return self.instance_dict.get(step, None)
    def get_form_kwargs(self, step=None):
        """
        Returns the keyword arguments for instantiating the form
        (or formset) on given step.
        """
        return {}
    def get_form(self, step=None, data=None, files=None):
        """
        Constructs the form for a given `step`. If no `step` is defined, the
        current step will be determined automatically.
        The form will be initialized using the `data` argument to prefill the
        new form. If needed, instance or queryset (for `ModelForm` or
        `ModelFormSet`) will be added too.
        """
        if step is None:
            step = self.steps.current
        # prepare the kwargs for the form instance.
        kwargs = self.get_form_kwargs(step)
        kwargs.update({
            'data': data,
            'files': files,
            'prefix': self.get_form_prefix(step, self.form_list[step]),
            'initial': self.get_form_initial(step),
        })
        if issubclass(self.form_list[step], forms.ModelForm):
            # If the form is based on ModelForm, add instance if available.
            kwargs.update({'instance': self.get_form_instance(step)})
        elif issubclass(self.form_list[step], forms.models.BaseModelFormSet):
            # If the form is based on ModelFormSet, add queryset if available.
            kwargs.update({'queryset': self.get_form_instance(step)})
        return self.form_list[step](**kwargs)
    def process_step(self, form):
        """
        This method is used to postprocess the form data. By default, it
        returns the raw `form.data` dictionary.
        """
        return self.get_form_step_data(form)
    def process_step_files(self, form):
        """
        This method is used to postprocess the form files. By default, it
        returns the raw `form.files` dictionary.
        """
        return self.get_form_step_files(form)
    def render_revalidation_failure(self, step, form, **kwargs):
        """
        Gets called when a form doesn't validate when rendering the done
        view. By default, it changes the current step to the failing form's
        step and renders the form.
        """
        self.storage.current_step = step
        return self.render(form, **kwargs)
    def get_form_step_data(self, form):
        """
        Is used to return the raw form data. You may use this method to
        manipulate the data.
        """
        return form.data
    def get_form_step_files(self, form):
        """
        Is used to return the raw form files. You may use this method to
        manipulate the data.
        """
        return form.files
    def get_all_cleaned_data(self):
        """
        Returns a merged dictionary of all step cleaned_data dictionaries.
        If a step contains a `FormSet`, the key will be prefixed with formset
        and contain a list of the formset's cleaned_data dictionaries.
        """
        cleaned_data = {}
        for form_key in self.get_form_list():
            form_obj = self.get_form(
                step=form_key,
                data=self.storage.get_step_data(form_key),
                files=self.storage.get_step_files(form_key)
            )
            if form_obj.is_valid():
                if isinstance(form_obj.cleaned_data, (tuple, list)):
                    cleaned_data.update({
                        'formset-%s' % form_key: form_obj.cleaned_data
                    })
                else:
                    cleaned_data.update(form_obj.cleaned_data)
        return cleaned_data
    def get_cleaned_data_for_step(self, step):
        """
        Returns the cleaned data for a given `step`. Before returning the
        cleaned data, the stored values are being revalidated through the
        form. If the data doesn't validate, None will be returned.
        """
        if step in self.form_list:
            form_obj = self.get_form(step=step,
                data=self.storage.get_step_data(step),
                files=self.storage.get_step_files(step))
            if form_obj.is_valid():
                return form_obj.cleaned_data
        return None
    def get_next_step(self, step=None):
        """
        Returns the next step after the given `step`. If no more steps are
        available, None will be returned. If the `step` argument is None, the
        current step will be determined automatically.
        """
        if step is None:
            step = self.steps.current
        form_list = self.get_form_list()
        key = form_list.keyOrder.index(step) + 1
        if len(form_list.keyOrder) > key:
            return form_list.keyOrder[key]
        return None
    def get_prev_step(self, step=None):
        """
        Returns the previous step before the given `step`. If there are no
        steps available, None will be returned. If the `step` argument is
        None, the current step will be determined automatically.
        """
        if step is None:
            step = self.steps.current
        form_list = self.get_form_list()
        key = form_list.keyOrder.index(step) - 1
        if key >= 0:
            return form_list.keyOrder[key]
        return None
    def get_step_index(self, step=None):
        """
        Returns the index for the given `step` name. If no step is given,
        the current step will be used to get the index.
        """
        if step is None:
            step = self.steps.current
        return self.get_form_list().keyOrder.index(step)
    def get_context_data(self, form, *args, **kwargs):
        """
        Returns the template context for a step. You can overwrite this method
        to add more data for all or some steps. This method returns a
        dictionary containing the rendered form step. Available template
        context variables are:
        * all extra data stored in the storage backend
        * `form` - form instance of the current step
        * `wizard` - the wizard instance itself
        Example:
        .. code-block:: python
            class MyWizard(FormWizard):
                def get_context_data(self, form, **kwargs):
                    context = super(MyWizard, self).get_context_data(form, **kwargs)
                    if self.steps.current == 'my_step_name':
                        context.update({'another_var': True})
                    return context
        """
        context = super(WizardView, self).get_context_data(*args, **kwargs)
        context.update(self.storage.extra_data)
        context['wizard'] = {
            'form': form,
            'steps': self.steps,
            'management_form': ManagementForm(prefix=self.prefix, initial={
                'current_step': self.steps.current,
            }),
        }
        return context
    def render(self, form=None, **kwargs):
        """
        Returns a ``HttpResponse`` containing all needed context data.
        """
        form = form or self.get_form()
        context = self.get_context_data(form, **kwargs)
        return self.render_to_response(context)
    def done(self, form_list, **kwargs):
        """
        This method must be overridden by a subclass to process the form data
        after processing all steps.
        """
        raise NotImplementedError("Your %s class has not defined a done() "
            "method, which is required." % self.__class__.__name__)
class SessionWizardView(WizardView):
    """
    A WizardView with pre-configured SessionStorage backend.
    """
    # Dotted path resolved by get_storage() in dispatch().
    storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieWizardView(WizardView):
    """
    A WizardView with pre-configured CookieStorage backend.
    """
    # Dotted path resolved by get_storage() in dispatch().
    storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
class NamedUrlWizardView(WizardView):
    """
    A WizardView with URL named steps support.

    Each step is addressed by its own URL (reversed from ``url_name`` with
    a ``step`` kwarg), so GET/POST handling is largely redirect-based.
    """
    url_name = None
    done_step_name = None
    @classmethod
    def get_initkwargs(cls, *args, **kwargs):
        """
        We require a url_name to reverse URLs later. Additionally users can
        pass a done_step_name to change the URL name of the "done" view.
        """
        assert 'url_name' in kwargs, 'URL name is needed to resolve correct wizard URLs'
        extra_kwargs = {
            'done_step_name': kwargs.pop('done_step_name', 'done'),
            'url_name': kwargs.pop('url_name'),
        }
        initkwargs = super(NamedUrlWizardView, cls).get_initkwargs(*args, **kwargs)
        initkwargs.update(extra_kwargs)
        assert initkwargs['done_step_name'] not in initkwargs['form_list'], \
            'step name "%s" is reserved for "done" view' % initkwargs['done_step_name']
        return initkwargs
    def get(self, *args, **kwargs):
        """
        This renders the form or, if needed, does the http redirects.
        """
        step_url = kwargs.get('step', None)
        if step_url is None:
            # No step in the URL: redirect to the current step, optionally
            # resetting the wizard first via ?reset=...
            if 'reset' in self.request.GET:
                self.storage.reset()
                self.storage.current_step = self.steps.first
            if self.request.GET:
                query_string = "?%s" % self.request.GET.urlencode()
            else:
                query_string = ""
            next_step_url = reverse(self.url_name, kwargs={
                'step': self.steps.current,
            }) + query_string
            return redirect(next_step_url)
        # is the current step the "done" name/view?
        elif step_url == self.done_step_name:
            last_step = self.steps.last
            return self.render_done(self.get_form(step=last_step,
                data=self.storage.get_step_data(last_step),
                files=self.storage.get_step_files(last_step)
            ), **kwargs)
        # is the url step name not equal to the step in the storage?
        # if yes, change the step in the storage (if name exists)
        elif step_url == self.steps.current:
            # URL step name and storage step name are equal, render!
            # BUGFIX: the files argument previously received
            # self.storage.current_step_data (the *data* dict), which broke
            # steps containing FileFields; fetch the stored files instead.
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.get_step_files(self.steps.current),
            ), **kwargs)
        elif step_url in self.get_form_list():
            self.storage.current_step = step_url
            # BUGFIX: same data-passed-as-files mistake as above.
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.get_step_files(self.steps.current),
            ), **kwargs)
        # invalid step name, reset to first and redirect.
        else:
            self.storage.current_step = self.steps.first
            return redirect(self.url_name, step=self.steps.first)
    def post(self, *args, **kwargs):
        """
        Do a redirect if user presses the prev. step button. The rest of this
        is super'd from FormWizard.
        """
        prev_step = self.request.POST.get('wizard_prev_step', None)
        if prev_step and prev_step in self.get_form_list():
            self.storage.current_step = prev_step
            return redirect(self.url_name, step=prev_step)
        return super(NamedUrlWizardView, self).post(*args, **kwargs)
    def render_next_step(self, form, **kwargs):
        """
        When using the NamedUrlFormWizard, we have to redirect to update the
        browser's URL to match the shown step.
        """
        next_step = self.get_next_step()
        self.storage.current_step = next_step
        return redirect(self.url_name, step=next_step)
    def render_revalidation_failure(self, failed_step, form, **kwargs):
        """
        When a step fails, we have to redirect the user to the first failing
        step.
        """
        self.storage.current_step = failed_step
        return redirect(self.url_name, step=failed_step)
    def render_done(self, form, **kwargs):
        """
        When rendering the done view, we have to redirect first (if the URL
        name doesn't fit).
        """
        if kwargs.get('step', None) != self.done_step_name:
            return redirect(self.url_name, step=self.done_step_name)
        return super(NamedUrlWizardView, self).render_done(form, **kwargs)
class NamedUrlSessionWizardView(NamedUrlWizardView):
    """
    A NamedUrlWizardView with pre-configured SessionStorage backend.
    """
    # Dotted path resolved by get_storage() in dispatch().
    storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class NamedUrlCookieWizardView(NamedUrlWizardView):
"""
A NamedUrlFormWizard with pre-configured CookieStorageBackend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
| lamby/live-studio | contrib/django/contrib/formtools/wizard/views.py | Python | agpl-3.0 | 26,571 |
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.keras._impl.keras.estimator import model_to_estimator | ryfeus/lambda-packs | Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/estimator/__init__.py | Python | mit | 227 |
"""
MagPy
GFZ input filter
supports Kp values from the qlyymm.tab
Written by Roman Leonhardt October 2012
- contains test, read and write function for hour data
ToDo: Filter for minute data
"""
from __future__ import print_function
from magpy.stream import *
def isGFZKP(filename):
    """
    Checks whether a file is ASCII Data format
    containing the GFZ Kp values.

    The first line must start with a yymmdd date, followed by two spaces,
    with a Kp sign character ('o', '+' or '-') at offset 9.
    Returns True on a match (and prints a notice), False otherwise.
    """
    try:
        # 'with' guarantees the handle is closed again; the previous
        # version leaked the open file object.
        with open(filename, 'rt') as fh:
            temp = fh.readline()
    except:
        # unreadable / missing file -> not this format
        return False
    try:
        testdate = datetime.strptime(temp[:6],"%y%m%d")
    except:
        return False
    try:
        if not temp[6:8] == "  ":
            return False
        if not temp[9] in ['o','+','-']:
            return False
    except:
        # line shorter than 10 characters
        return False
    print('Found GFZ Kp format')
    return True
def readGFZKP(filename, headonly=False, **kwargs):
    """
    Reading GFZ format data.
    contains 3 hours Kp values with sign, cumulative Kp
    Looks like:
    121001 7o 6o 4o 2+ 2+ 1+ 1o 2- 26- 34 1.3
    121002 2+ 1- 1- 3o 2+ 2- 1+ 2+ 14+ 7 0.4
    121003 3- 2+ 2- 1o 1- 0+ 1- 1+ 11- 6 0.2
    121004 1- 1- 0+ 1- 0o 0o 0o 0+ 3- 2 0.0
    """
    # NOTE(review): starttime/endtime are accepted but never applied below;
    # the whole file is always read. getfile is likewise unused.
    starttime = kwargs.get('starttime')
    endtime = kwargs.get('endtime')
    getfile = True
    fh = open(filename, 'rt')
    stream = DataStream()
    # Check whether header infromation is already present
    if stream.header is None:
        headers = {}
    else:
        headers = stream.header
    logging.info(' Read: %s Format: GFZ Kp' % (filename))
    # read file and split text into channels
    # NOTE(review): li..lf and code appear to be leftovers; they are never
    # filled or used in this function.
    li,ld,lh,lx,ly,lz,lf = [],[],[],[],[],[],[]
    code = ''
    # One column list per KEYLIST entry; Kp goes to var1, sum/Ap/Cp to
    # var2..var4 (see header assignments at the bottom).
    array = [[] for key in KEYLIST]
    indvar1 = KEYLIST.index('var1')
    indvar2 = KEYLIST.index('var2')
    indvar3 = KEYLIST.index('var3')
    indvar4 = KEYLIST.index('var4')
    for line in fh:
        elements = line.split()
        getdat = True
        # A data line starts with a parsable yymmdd token; anything else
        # (e.g. the monthly-mean footer) sets getdat False.
        try:
            if len(elements[0])>4:
                day = datetime.strptime(elements[0],"%y%m%d")
                getdat = True
            else:
                getdat = False
        except:
            getdat = False
        if line.isspace():
            # blank line
            pass
        elif headonly:
            # skip data for option headonly
            continue
        elif len(line) > 6 and getdat: # hour file
            # skip data for option headonly
            elements = line.split()
            #try:
            day = datetime.strptime(elements[0],"%y%m%d")
            # Columns 9-11 (when present) hold daily sum Kp, Ap and Cp.
            if len(elements) == 12:
                cum = float(elements[9].strip('o').strip('-').strip('+'))
                num = int(elements[10])
                fum = float(elements[11])
            else:
                cum = float(NaN)
                num = float(NaN)
                fum = float(NaN)
            if len(elements)>9:
                endcount = 9
            else:
                endcount = len(elements)
            # Eight 3-hourly Kp values; the trailing sign encodes thirds:
            # 'o' -> +0, '-' -> -1/3, '+' -> +1/3.
            for i in range (1,endcount):
                #row = LineStruct()
                signval = elements[i][1:]
                if signval == 'o':
                    adderval = 0.0
                elif signval == '-':
                    adderval = -0.33333333
                elif signval == '+':
                    adderval = +0.33333333
                array[indvar1].append(float(elements[i][:1])+adderval)
                # Timestamp is centred in the i-th 3-hour interval.
                dt = i*3-1.5
                array[0].append(date2num(day + timedelta(hours=dt)))
                array[indvar2].append(cum)
                array[indvar3].append(num)
                array[indvar4].append(fum)
        elif len(line) > 6 and not getdat: # monthly mean
            if line.split()[1] == 'Mean':
                means = line.split()
                # Not used so far
                monthlymeanap = means[2]
                monthlymeancp = means[3]
                pass
        else:
            print("Error while reading GFZ Kp format")
            pass
    fh.close()
    array[0]=np.asarray(array[0])
    array[indvar1]=np.asarray(array[indvar1])
    array[indvar2]=np.asarray(array[indvar2])
    array[indvar3]=np.asarray(array[indvar3])
    array[indvar4]=np.asarray(array[indvar4])
    # header info
    headers['col-var1'] = 'Kp'
    headers['col-var2'] = 'Sum Kp'
    headers['col-var3'] = 'Ap'
    headers['col-var4'] = 'Cp'
    headers['DataSource'] = 'GFZ Potsdam'
    headers['DataFormat'] = 'MagPyK'
    headers['DataReferences'] = 'http://www-app3.gfz-potsdam.de/kp_index/'
    return DataStream([LineStruct()], headers, np.asarray(array))
    #return DataStream(stream, headers, np.asarray(array))
| geomagpy/magpy | magpy/lib/format_gfz.py | Python | bsd-3-clause | 4,709 |
# Import required modules
import os
# Import custom modules
import app
import input
import output
import ballot
import keys
import crypto
# Create a new ballot
def initaliseBallot():
    """Interactively collect ballot details and create the ballot.

    Prompts for title, description, comma-separated candidates and voter
    addresses, then a stored key and an Ethereum account, and hands
    everything to ballot.initalise().  (The misspelled name is kept for
    backwards compatibility with existing callers.)
    """
    # Get the name of the ballot
    title = raw_input("Title of the ballot: ")
    # Get the description of the ballot
    description = raw_input("Description for the ballot: ")
    # Get the ballot candidates
    candidatesString = raw_input("Please enter the candidates separated by commas (','): ")
    candidates = candidatesString.split(",")
    # Get the voter addresses
    votersString = raw_input("Please enter the voter addresses separated by commas (','): ")
    voters = votersString.split(",")
    # Get the ballot public key
    print 'Select the key you want to use for the ballot.'
    key = input.getKeySelection()
    # Get the Ethereum account to use
    print 'Select the account you want to use for the ballot.'
    account = input.getAccountSelection()
    # Initalise a ballot with the given values
    ballot.initalise(account, title, description, candidates, voters, key)
# List the ballots in the console
def listBallots():
    """Print every stored ballot to the console."""
    output.allBallots()
# Get more information about a ballot
def ballotInfo():
    """Prompt for a ballot and print its details."""
    # Get the ballot
    print 'Select the ballot you wish to see more information about.'
    ballot = input.getBallotSelection()
    # Display the ballot information
    output.ballotInfo(ballot)
# Cast a vote
def castVote():
    """Interactively select ballot, account and candidate, then vote.

    Builds the (encrypted) vote via ballot.buildVote and submits it with
    ballot.executeVote.
    """
    # Get the ballot
    print 'Select the ballot you want to vote in.'
    selectedBallot = input.getBallotSelection()
    # Get the Ethereum account to use
    print 'Select the account you want to use to vote.'
    account = input.getAccountSelection()
    # Display the ballot information
    output.ballotInfo(selectedBallot, False, False)
    # Get the candidate to vote for
    print 'Select the candidate you wish to vote for.'
    candidate = input.getCandidateSelection(selectedBallot)
    # Build the vote to send
    vote = ballot.buildVote(selectedBallot, candidate)
    # Execute the vote
    ballot.executeVote(selectedBallot, account, vote)
# Import a ballot
def importBallot():
    """Prompt for a file path and import the ballot stored there."""
    # Get the path for the ballot file
    filePath = raw_input("Please give the path to the ballot file: ")
    # Import the ballot file at the given path
    ballot.importBallot(filePath)
    # Return control to the program
    print 'Ballot at "' + filePath + '" saved.'
# Export a ballot
def exportBallot():
    """Prompt for a ballot and export it to a CSV file in the CWD."""
    # Get the ballot
    print 'Select the ballot you wish to export.'
    ballotToExport = input.getBallotSelection()
    # Check if a ballot was selected
    if ballotToExport == False:
        # Invalid index or name given
        print 'Ballot not found.'
    else:
        # Export the ballot
        ballot.export(ballotToExport)
        # Return control to the program
        print 'Ballot "' + ballotToExport.title + '" exported to: ' + os.getcwd() + '/' + ballotToExport.title + ' Export.csv'
# Tally the votes of a ballot
def tallyResults():
    """Homomorphically tally a ballot and print per-candidate totals.

    Requires the private half of the ballot key: the encrypted votes are
    summed with crypto.addVotes (using the public key) and each candidate
    total is then decrypted for display.
    """
    # Get the ballot
    ballotToTally = input.getBallotSelection()
    # Show the saved keys and get the users selection
    print 'Select the key used for the ballot. You need the private key to view the result.'
    key = input.getKeySelection()
    # Check that the user has the private key
    if (key.privateKey == False):
        # Alert the user they do not possess the private key
        print 'Private key not found for the selected key.'
    else:
        # Get the votes
        votes = ballot.getVotes(ballotToTally)
        # Calculate the result using the key
        results = crypto.addVotes(votes, key.publicKey)
        print 'Results: '
        for index in xrange(len(results)):
            print 'Candidate ' + str(index+1) + ': ' + str(crypto.decrypt(key.privateKey, results[index]))
# Delete a ballot
def deleteBallot():
    """Prompt for a ballot and delete it from local storage."""
    # Show the saved ballots and get the users selection
    ballotToDelete = input.getBallotSelection()
    # Check if a balloy was selected
    if ballotToDelete == False:
        # Invalid index or name given
        print 'Ballot not found.'
    else:
        # Delete the ballot
        ballot.delete(ballotToDelete)
        # Return control to the program
        print 'Ballot "' + ballotToDelete.title + '" deleted.'
# List the accounts in the console
def listAccounts():
    """Print every known Ethereum account to the console."""
    output.allAccounts()
# Generate and save a new set of keys
def newKey():
    """Generate a fresh key pair and save it under a user-chosen name."""
    # Get the name for the key
    keyName = raw_input("Please enter a name for the key: ")
    # Generate a new key pair
    publicKey, privateKey = crypto.generateKeyPair()
    # Save the key
    keys.saveKey(keyName, publicKey, privateKey)
    # Return control to the app
    print 'Key "' + keyName + '" saved.'
# List all the keys in the console
def listKeys():
    """Print every stored key to the console."""
    output.allKeys()
# Import a key
def importKey():
    """Prompt for a key file path and import that key."""
    # Get the path for the key file
    filePath = raw_input("Please give the path to the key file: ")
    # Import the key file at the given path
    keys.importKey(filePath)
    # Return control to the program
    print 'Key at "' + filePath + '" saved.'
# Export a key
def exportKey():
    """Prompt for a saved key and export it to a CSV file in the working directory."""
    # Show the saved keys and get the users selection
    keyToExport = input.getKeySelection()
    # Check if a key was selected
    if keyToExport == False:
        # Invalid index or name given
        print 'Key not found.'
    else:
        # Initialise the include-private-key flag
        includePrivateKey = False
        # Check if there is a private key
        if keyToExport.privateKey:
            # Check if the user wants to include the private key
            includePrivateKeyInput = raw_input("Do you want to include the private key? (y/n): ")
            # Set the flag according to the users response (anything but 'y' excludes it)
            if includePrivateKeyInput == 'y':
                includePrivateKey = True
            else:
                includePrivateKey = False
        # Export the key (optionally including the private part)
        keys.export(keyToExport, includePrivateKey)
        # Return control to the program
        print 'Key "' + keyToExport.name + '" exported to: ' + os.getcwd() + '/' + keyToExport.name + ' Export.csv'
# Test a key is working correctly
def testKey():
    """Run encryption and homomorphism self-tests on a selected key (requires its private half)."""
    # Show the saved keys and get the users selection
    keyToTest = input.getKeySelection()
    # Check if a key was selected
    if keyToTest == False:
        # Invalid index or name given
        print 'Key not found.'
    else:
        # Check the key can be tested (has private key)
        if keyToTest.privateKey:
            # Test the key can encrypt and decrypt an integer
            if keys.testEncryption(keyToTest):
                print 'Encryption test passed.'
            else:
                print 'Encryption test failed.'
            # Test the key can perform a homomorphic addition
            if keys.testHomomorphism(keyToTest):
                print 'Homomorphism test passed.'
            else:
                print 'Homomorphism test failed.'
            # Return control to the program
            print 'Test of key "' + keyToTest.name + '" complete.'
        else:
            # Inform the user there is no private key to test with
            print 'Key cannot be tested as there is no private key.'
# Delete a key
def deleteKey():
    """Prompt for a saved key and delete it."""
    # Show the saved keys and get the users selection
    keyToDelete = input.getKeySelection()
    # Check if a key was selected
    if keyToDelete == False:
        # Invalid index or name given
        print 'Key not found.'
    else:
        # Delete the key
        keys.delete(keyToDelete)
        # Return control to the program
        print 'Key "' + keyToDelete.name + '" deleted.'
# Define the possible commands
# Maps each console command string to its zero-argument handler function;
# run() dispatches through this table.
commandMappings = {
    'init-ballot': initaliseBallot,
    'list-ballots': listBallots,
    'ballot-info': ballotInfo,
    'cast-vote': castVote,
    'import-ballot': importBallot,
    'export-ballot': exportBallot,
    'tally-results': tallyResults,
    'delete-ballot': deleteBallot,
    'list-accounts': listAccounts,
    'new-key': newKey,
    'list-keys': listKeys,
    'import-key': importKey,
    'export-key': exportKey,
    'test-key': testKey,
    'delete-key': deleteKey,
    'help': app.outputHelp,
    'quit': app.quit
}
# Take an input and run the appropriate command (if valid)
def run(input):
    """Dispatch a command string to its handler from commandMappings.

    Note: the parameter name shadows the builtin (and the module-level
    `input` helper used elsewhere in this file) — kept for interface
    compatibility with callers.
    """
    # Check if the given command exists in the mapping
    if input in commandMappings:
        # Get the given commands corresponding function
        commandFunction = commandMappings[input]
        # Call the appropriate command function
        commandFunction()
    else:
        # Display an invalid command messages
        print 'Invalid command given, use "help" to see list of valid commands.'
| mrsamuelbarnes/Blockchain-Ballot | commands.py | Python | mit | 8,768 |
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
import base64
import json
import yaml
import logging
import os
import re
import subprocess
import stat
import sys
import urllib
import urlparse
import threading
import contextlib
import tempfile
import six
import psutil
from functools import reduce, wraps
from StringIO import StringIO
from decimal import Decimal
# Django
from django.core.exceptions import ObjectDoesNotExist
from django.db import DatabaseError
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.db.models.query import QuerySet
from django.db.models import Q
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from django.utils.encoding import smart_str
from django.utils.text import slugify
from django.apps import apps
# Module-level logger for AWX utility helpers.
logger = logging.getLogger('awx.main.utils')
# Explicit public API of this utility module.
__all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 'memoize', 'memoize_delete',
           'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_version', 'update_scm_url',
           'get_type_for_model', 'get_model_for_type', 'copy_model_by_class', 'region_sorting',
           'copy_m2m_relationships', 'prefetch_page_capabilities', 'to_python_boolean',
           'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
           '_inventory_updates', 'get_pk_from_dict', 'getattrd', 'getattr_dne', 'NoDefaultProvided',
           'get_current_apps', 'set_current_apps', 'OutputEventFilter', 'OutputVerboseFilter',
           'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity',
           'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict',
           'model_instance_diff', 'timestamp_apiformat', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
           'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', 'get_external_account',
           'task_manager_bulk_reschedule', 'schedule_task_manager']
def get_object_or_400(klass, *args, **kwargs):
    '''
    Return a single object from the given model or queryset based on the query
    params, otherwise raise an exception that will return in a 400 response.
    '''
    # Imported lazily to avoid requiring Django configuration at import time.
    from django.shortcuts import _get_queryset
    qs = _get_queryset(klass)
    try:
        return qs.get(*args, **kwargs)
    except (qs.model.DoesNotExist, qs.model.MultipleObjectsReturned) as exc:
        # Zero or multiple matches both become a 400-level ParseError.
        raise ParseError(*exc.args)
def get_object_or_403(klass, *args, **kwargs):
    '''
    Return a single object from the given model or queryset based on the query
    params, otherwise raise an exception that will return in a 403 response.
    '''
    # Imported lazily to avoid requiring Django configuration at import time.
    from django.shortcuts import _get_queryset
    qs = _get_queryset(klass)
    try:
        return qs.get(*args, **kwargs)
    except (qs.model.DoesNotExist, qs.model.MultipleObjectsReturned) as exc:
        # Zero or multiple matches both become a 403-level PermissionDenied.
        raise PermissionDenied(*exc.args)
def to_python_boolean(value, allow_none=False):
    """Coerce a textual truth value to True/False (optionally None).

    Accepts 'true'/'1'/'t' and 'false'/'0'/'f' (case-insensitive); with
    allow_none, 'none'/'null' map to None. Raises ValueError otherwise.
    """
    lowered = six.text_type(value).lower()
    if lowered in ('true', '1', 't'):
        return True
    if lowered in ('false', '0', 'f'):
        return False
    if allow_none and lowered in ('none', 'null'):
        return None
    raise ValueError(_(u'Unable to convert "%s" to boolean') % six.text_type(value))
def region_sorting(region):
    """Sort key for (id, name) region tuples.

    'All' sorts first (-1), US regions next (0), and everything else
    falls back to its (original-case) name.
    """
    lowered_name = region[1].lower()
    if lowered_name == 'all':
        return -1
    if lowered_name.startswith('us'):
        return 0
    return region[1]
def camelcase_to_underscore(s):
    '''
    Convert CamelCase names to lowercase_with_underscore.
    '''
    # Prefix an underscore before each capital that begins a word: a capital
    # following a lowercase letter, or a capital not followed by another
    # capital (so acronym runs like "HTTP" stay together).
    marked = re.sub(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', r'_\1', s)
    return marked.lower().strip('_')
class RequireDebugTrueOrTest(logging.Filter):
    '''
    Logging filter to output when in DEBUG mode or running tests.
    '''
    def filter(self, record):
        # Imported lazily so the module can load before Django settings exist.
        from django.conf import settings
        # Pass the record when DEBUG is on, or a test run is detected from argv.
        return settings.DEBUG or 'test' in sys.argv
class IllegalArgumentError(ValueError):
    # Raised for invalid argument combinations (e.g. memoize() options).
    pass
def get_memoize_cache():
    # Return Django's default cache backend; imported lazily so this module
    # can be imported before Django is configured.
    from django.core.cache import cache
    return cache
def memoize(ttl=60, cache_key=None, track_function=False):
    '''
    Decorator to wrap a function and cache its result.

    ttl: cache entry lifetime in seconds.
    cache_key: fixed cache key to use instead of one derived from the call
        arguments (mutually exclusive with track_function).
    track_function: store all results for the function under a single key,
        in a dict keyed by the call arguments (enables memoize_delete()).
    '''
    if cache_key and track_function:
        raise IllegalArgumentError("Can not specify cache_key when track_function is True")
    cache = get_memoize_cache()
    def memoize_decorator(f):
        @wraps(f)
        def _memoizer(*args, **kwargs):
            if track_function:
                # One cache entry per function, holding a dict of results
                # keyed by the slugified call arguments.
                cache_dict_key = slugify('%r %r' % (args, kwargs))
                key = slugify("%s" % f.__name__)
                cache_dict = cache.get(key) or dict()
                if cache_dict_key not in cache_dict:
                    value = f(*args, **kwargs)
                    cache_dict[cache_dict_key] = value
                    cache.set(key, cache_dict, ttl)
                else:
                    value = cache_dict[cache_dict_key]
            else:
                # One cache entry per unique set of call arguments (or the
                # caller-supplied cache_key).
                key = cache_key or slugify('%s %r %r' % (f.__name__, args, kwargs))
                value = cache.get(key)
                # NOTE(review): a legitimately-None result is never cached and
                # will be recomputed on every call.
                if value is None:
                    value = f(*args, **kwargs)
                    cache.set(key, value, ttl)
            return value
        return _memoizer
    return memoize_decorator
def memoize_delete(function_name):
    # Drop the cache entry for a function memoized with track_function=True
    # (those are keyed by function name).
    cache = get_memoize_cache()
    return cache.delete(function_name)
@memoize()
def get_ansible_version():
    '''
    Return Ansible version installed.
    '''
    try:
        proc = subprocess.Popen(['ansible', '--version'],
                                stdout=subprocess.PIPE)
        result = proc.communicate()[0]
        # First output line looks like "ansible X.Y.Z"; strip the program name.
        return result.split('\n')[0].replace('ansible', '').strip()
    except Exception:
        # Binary missing or unrunnable — report a sentinel rather than raise.
        return 'unknown'
@memoize()
def get_ssh_version():
    '''
    Return SSH version installed.
    '''
    try:
        # 'ssh -V' prints its version banner to stderr.
        proc = subprocess.Popen(['ssh', '-V'],
                                stderr=subprocess.PIPE)
        result = proc.communicate()[1]
        # e.g. "OpenSSH_7.4p1, ..." -> take the token after the underscore.
        return result.split(" ")[0].split("_")[1]
    except Exception:
        return 'unknown'
def get_awx_version():
    '''
    Return AWX version as reported by setuptools.
    '''
    from awx import __version__
    try:
        import pkg_resources
        return pkg_resources.require('awx')[0].version
    except Exception:
        # Fall back to the package's own __version__ if setuptools metadata
        # is unavailable.
        return __version__
class StubLicense(object):
    """Fallback licenser used when the proprietary tower_license package is
    not installed; reports a permanently-valid open license with every
    feature flag enabled."""

    features = {
        'activity_streams': True,
        'ha': True,
        'ldap': True,
        'multiple_organizations': True,
        'surveys': True,
        'system_tracking': True,
        'rebranding': True,
        'enterprise_auth': True,
        'workflows': True,
    }

    def validate(self):
        """Return a static validation payload describing the open license."""
        return {
            'license_key': 'OPEN',
            'valid_key': True,
            'compliant': True,
            'features': self.features,
            'license_type': 'open',
        }
def get_licenser(*args, **kwargs):
    # Prefer the proprietary licenser when its package is installed; fall
    # back to the open StubLicense otherwise.
    try:
        from tower_license import TowerLicense
        return TowerLicense(*args, **kwargs)
    except ImportError:
        # NOTE(review): StubLicense.__init__ takes no arguments, so passing
        # any args here would raise — callers presumably call with none.
        return StubLicense(*args, **kwargs)
def update_scm_url(scm_type, url, username=True, password=True,
                   check_special_cases=True, scp_format=False):
    '''
    Update the given SCM URL to add/replace/remove the username/password. When
    username/password is True, preserve existing username/password, when
    False (None, '', etc.), remove any existing username/password, otherwise
    replace username/password. Also validates the given URL.
    '''
    # Handle all of the URL formats supported by the SCM systems:
    # git: https://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
    # hg: http://www.selenic.com/mercurial/hg.1.html#url-paths
    # svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
    if scm_type not in ('git', 'hg', 'svn', 'insights'):
        raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type))
    if not url.strip():
        return ''
    parts = urlparse.urlsplit(url)
    try:
        # Accessing .port validates the port portion of the netloc.
        parts.port
    except ValueError:
        raise ValueError(_('Invalid %s URL') % scm_type)
    if parts.scheme == 'git+ssh' and not scp_format:
        raise ValueError(_('Unsupported %s URL') % scm_type)
    if '://' not in url:
        # Handle SCP-style URLs for git (e.g. [user@]host.xz:path/to/repo.git/).
        if scm_type == 'git' and ':' in url:
            if '@' in url:
                userpass, hostpath = url.split('@', 1)
            else:
                userpass, hostpath = '', url
            if hostpath.count(':') > 1:
                raise ValueError(_('Invalid %s URL') % scm_type)
            host, path = hostpath.split(':', 1)
            #if not path.startswith('/') and not path.startswith('~/'):
            #    path = '~/%s' % path
            #if path.startswith('/'):
            #    path = path.lstrip('/')
            hostpath = '/'.join([host, path])
            modified_url = '@'.join(filter(None, [userpass, hostpath]))
            # git+ssh scheme identifies URLs that should be converted back to
            # SCP style before passed to git module.
            parts = urlparse.urlsplit('git+ssh://%s' % modified_url)
        # Handle local paths specified without file scheme (e.g. /path/to/foo).
        # Only supported by git and hg.
        elif scm_type in ('git', 'hg'):
            if not url.startswith('/'):
                parts = urlparse.urlsplit('file:///%s' % url)
            else:
                parts = urlparse.urlsplit('file://%s' % url)
        else:
            raise ValueError(_('Invalid %s URL') % scm_type)
    # Validate that scheme is valid for given scm_type.
    scm_type_schemes = {
        'git': ('ssh', 'git', 'git+ssh', 'http', 'https', 'ftp', 'ftps', 'file'),
        'hg': ('http', 'https', 'ssh', 'file'),
        'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'),
        'insights': ('http', 'https')
    }
    if parts.scheme not in scm_type_schemes.get(scm_type, ()):
        raise ValueError(_('Unsupported %s URL') % scm_type)
    if parts.scheme == 'file' and parts.netloc not in ('', 'localhost'):
        raise ValueError(_('Unsupported host "%s" for file:// URL') % (parts.netloc))
    elif parts.scheme != 'file' and not parts.netloc:
        raise ValueError(_('Host is required for %s URL') % parts.scheme)
    # username/password semantics: True preserves, falsy removes, any other
    # value replaces.
    if username is True:
        netloc_username = parts.username or ''
    elif username:
        netloc_username = username
    else:
        netloc_username = ''
    if password is True:
        netloc_password = parts.password or ''
    elif password:
        netloc_password = password
    else:
        netloc_password = ''
    # Special handling for github/bitbucket SSH URLs.
    if check_special_cases:
        special_git_hosts = ('github.com', 'bitbucket.org', 'altssh.bitbucket.org')
        if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_username != 'git':
            raise ValueError(_('Username must be "git" for SSH access to %s.') % parts.hostname)
        if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
            #raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
            netloc_password = ''
        special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org')
        if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg':
            raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname)
        if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password:
            #raise ValueError('Password not supported for SSH with Mercurial.')
            netloc_password = ''
    # Rebuild the netloc, URL-quoting the credentials.
    if netloc_username and parts.scheme != 'file' and scm_type != "insights":
        netloc = u':'.join([urllib.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
    else:
        netloc = u''
    netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
    if parts.port:
        netloc = u':'.join([netloc, six.text_type(parts.port)])
    new_url = urlparse.urlunsplit([parts.scheme, netloc, parts.path,
                                   parts.query, parts.fragment])
    if scp_format and parts.scheme == 'git+ssh':
        # Convert back to SCP form: drop the scheme and turn the first '/'
        # (host/path separator) into ':'.
        new_url = new_url.replace('git+ssh://', '', 1).replace('/', ':', 1)
    return new_url
def get_allowed_fields(obj, serializer_mapping):
    """Return the field names recordable for *obj* in the activity stream.

    With a serializer mapped for the object's class, use its writable
    serializer fields (plus 'id'); otherwise use all concrete model fields.
    A small per-model blacklist is always removed.
    """
    if serializer_mapping is not None and obj.__class__ in serializer_mapping:
        serializer = serializer_mapping[obj.__class__]()
        allowed_fields = [
            name for name, field in serializer.fields.items() if not field.read_only
        ] + ['id']
    else:
        allowed_fields = [field.name for field in obj._meta.fields]
    ACTIVITY_STREAM_FIELD_EXCLUSIONS = {
        'user': ['last_login'],
        'oauth2accesstoken': ['last_used'],
        'oauth2application': ['client_secret']
    }
    blacklisted = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(obj._meta.model_name, [])
    if blacklisted:
        allowed_fields = [name for name in allowed_fields if name not in blacklisted]
    return allowed_fields
def _convert_model_field_for_display(obj, field_name, password_fields=None):
    # Render a single model field value for activity-stream display:
    # masks password/encrypted values, prefers display_<field>() helpers,
    # JSON-encodes containers, and stringifies everything non-primitive.
    # NOTE: Careful modifying the value of field_val, as it could modify
    # underlying model object field value also.
    try:
        field_val = getattr(obj, field_name, None)
    except ObjectDoesNotExist:
        # Related object was deleted; fall back to its stored FK id.
        return '<missing {}>-{}'.format(obj._meta.verbose_name, getattr(obj, '{}_id'.format(field_name)))
    if password_fields is None:
        password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password'])
    if field_name in password_fields or (
            isinstance(field_val, six.string_types) and
            field_val.startswith('$encrypted$')
    ):
        # Never expose secrets, whether by field name or encrypted marker.
        return u'hidden'
    if hasattr(obj, 'display_%s' % field_name):
        field_val = getattr(obj, 'display_%s' % field_name)()
    if isinstance(field_val, (list, dict)):
        try:
            field_val = json.dumps(field_val, ensure_ascii=False)
        except Exception:
            pass
    # `long` makes this Python 2 only; primitives pass through unchanged.
    if type(field_val) not in (bool, int, type(None), long):
        field_val = smart_str(field_val)
    return field_val
def model_instance_diff(old, new, serializer_mapping=None):
    """
    Calculate the differences between two model instances. One of the instances may be None (i.e., a newly
    created model or deleted model). This will cause all fields with a value to have changed (from None).
    serializer_mapping are used to determine read-only fields.
    When provided, read-only fields will not be included in the resulting dictionary

    Returns a dict of {field: (old_display, new_display)} for changed fields,
    or None when nothing differs.
    """
    from django.db.models import Model
    if not(old is None or isinstance(old, Model)):
        raise TypeError('The supplied old instance is not a valid model instance.')
    if not(new is None or isinstance(new, Model)):
        raise TypeError('The supplied new instance is not a valid model instance.')
    # Collect password field names per instance so both sides are masked.
    old_password_fields = set(getattr(type(old), 'PASSWORD_FIELDS', [])) | set(['password'])
    new_password_fields = set(getattr(type(new), 'PASSWORD_FIELDS', [])) | set(['password'])
    diff = {}
    # Field eligibility is driven by the *new* instance's class.
    allowed_fields = get_allowed_fields(new, serializer_mapping)
    for field in allowed_fields:
        old_value = getattr(old, field, None)
        new_value = getattr(new, field, None)
        if old_value != new_value:
            diff[field] = (
                _convert_model_field_for_display(old, field, password_fields=old_password_fields),
                _convert_model_field_for_display(new, field, password_fields=new_password_fields),
            )
    if len(diff) == 0:
        diff = None
    return diff
def model_to_dict(obj, serializer_mapping=None):
    """
    Serialize a model instance to a dictionary as best as possible
    serializer_mapping are used to determine read-only fields.
    When provided, read-only fields will not be included in the resulting dictionary
    """
    password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password'])
    allowed_fields = get_allowed_fields(obj, serializer_mapping)
    # Build the dict directly from the eligible concrete fields.
    return dict(
        (field.name,
         _convert_model_field_for_display(obj, field.name, password_fields=password_fields))
        for field in obj._meta.fields
        if field.name in allowed_fields
    )
def copy_model_by_class(obj1, Class2, fields, kwargs):
    '''
    Creates a new unsaved object of type Class2 using the fields from obj1
    values in kwargs can override obj1
    '''
    create_kwargs = {}
    for field_name in fields:
        # Foreign keys can be specified as field_name or field_name_id.
        id_field_name = '%s_id' % field_name
        if hasattr(obj1, id_field_name):
            # FK: precedence is kwargs[field] > kwargs[field_id] > obj1 value.
            if field_name in kwargs:
                value = kwargs[field_name]
            elif id_field_name in kwargs:
                value = kwargs[id_field_name]
            else:
                value = getattr(obj1, id_field_name)
            # Normalize a model instance down to its pk.
            if hasattr(value, 'id'):
                value = value.id
            create_kwargs[id_field_name] = value
        elif field_name in kwargs:
            if field_name == 'extra_vars' and isinstance(kwargs[field_name], dict):
                # extra_vars dicts are stored JSON-serialized.
                create_kwargs[field_name] = json.dumps(kwargs['extra_vars'])
            elif not isinstance(Class2._meta.get_field(field_name), (ForeignObjectRel, ManyToManyField)):
                # Relations cannot be set on an unsaved instance; skip them.
                create_kwargs[field_name] = kwargs[field_name]
        elif hasattr(obj1, field_name):
            field_obj = obj1._meta.get_field(field_name)
            if not isinstance(field_obj, ManyToManyField):
                create_kwargs[field_name] = getattr(obj1, field_name)
    # Apply class-specific extra processing for origination of unified jobs
    if hasattr(obj1, '_update_unified_job_kwargs') and obj1.__class__ != Class2:
        new_kwargs = obj1._update_unified_job_kwargs(create_kwargs, kwargs)
    else:
        new_kwargs = create_kwargs
    return Class2(**new_kwargs)
def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
    '''
    In-place operation.
    Given two saved objects, copies related objects from obj1
    to obj2 to field of same name, if field occurs in `fields`.
    Values in `kwargs` (iterables or related managers) override the
    source relation for that field.
    '''
    for field_name in fields:
        if hasattr(obj1, field_name):
            field_obj = obj1._meta.get_field(field_name)
            if isinstance(field_obj, ManyToManyField):
                # Many to Many can be specified as field_name
                src_field_value = getattr(obj1, field_name)
                if kwargs and field_name in kwargs:
                    override_field_val = kwargs[field_name]
                    # Explicit iterables replace the copied relation outright.
                    if isinstance(override_field_val, (set, list, QuerySet)):
                        getattr(obj2, field_name).add(*override_field_val)
                        continue
                    # Fix: compare class names with ==, not `is` — identity of
                    # equal string literals is a CPython interning accident.
                    if override_field_val.__class__.__name__ == 'ManyRelatedManager':
                        src_field_value = override_field_val
                dest_field = getattr(obj2, field_name)
                dest_field.add(*list(src_field_value.all().values_list('id', flat=True)))
def get_type_for_model(model):
    '''
    Return type name for a given model class.
    '''
    # Use the concrete model so proxy models share their parent's type name.
    opts = model._meta.concrete_model._meta
    return camelcase_to_underscore(opts.object_name)
def get_model_for_type(type):
    '''
    Return model class for a given type name.

    Raises DatabaseError if no model in the 'main' app (or auth.User)
    matches. Note: parameter shadows the builtin `type`.
    '''
    from django.contrib.contenttypes.models import ContentType
    for ct in ContentType.objects.filter(Q(app_label='main') | Q(app_label='auth', model='user')):
        ct_model = ct.model_class()
        if not ct_model:
            continue
        ct_type = get_type_for_model(ct_model)
        if type == ct_type:
            return ct_model
    else:
        # for/else: runs only when the loop finished without returning.
        raise DatabaseError('"{}" is not a valid AWX model.'.format(type))
def prefetch_page_capabilities(model, page, prefetch_list, user):
    '''
    Given a `page` list of objects, a nested dictionary of user_capabilities
    are returned by id, ex.
    {
        4: {'edit': True, 'start': True},
        6: {'edit': False, 'start': False}
    }
    Each capability is produced for all items in the page in a single query
    Examples of prefetch language:
    prefetch_list = ['admin', 'execute']
      --> prefetch the admin (edit) and execute (start) permissions for
          items in list for current user
    prefetch_list = ['inventory.admin']
      --> prefetch the related inventory FK permissions for current user,
          and put it into the object's cache
    prefetch_list = [{'copy': ['inventory.admin', 'project.admin']}]
      --> prefetch logical combination of admin permission to inventory AND
          project, put into cache dictionary as "copy"
    '''
    page_ids = [obj.id for obj in page]
    mapping = {}
    for obj in page:
        mapping[obj.id] = {}
    for prefetch_entry in prefetch_list:
        display_method = None
        if type(prefetch_entry) is dict:
            # NOTE(review): dict.keys()[0] is Python 2 only — confirm before
            # porting this module to Python 3.
            display_method = prefetch_entry.keys()[0]
            paths = prefetch_entry[display_method]
        else:
            paths = prefetch_entry
        if type(paths) is not list:
            paths = [paths]
        # Build the query for accessible_objects according the user & role(s)
        filter_args = []
        for role_path in paths:
            if '.' in role_path:
                # Dotted path: permission is checked on a related model;
                # null relations are treated as permitted.
                res_path = '__'.join(role_path.split('.')[:-1])
                role_type = role_path.split('.')[-1]
                parent_model = model
                for subpath in role_path.split('.')[:-1]:
                    parent_model = parent_model._meta.get_field(subpath).related_model
                filter_args.append(Q(
                    Q(**{'%s__pk__in' % res_path: parent_model.accessible_pk_qs(user, '%s_role' % role_type)}) |
                    Q(**{'%s__isnull' % res_path: True})))
            else:
                role_type = role_path
                filter_args.append(Q(**{'pk__in': model.accessible_pk_qs(user, '%s_role' % role_type)}))
        if display_method is None:
            # Role name translation to UI names for methods
            display_method = role_type
            if role_type == 'admin':
                display_method = 'edit'
            elif role_type in ['execute', 'update']:
                display_method = 'start'
        # Union that query with the list of items on page
        filter_args.append(Q(pk__in=page_ids))
        ids_with_role = set(model.objects.filter(*filter_args).values_list('pk', flat=True))
        # Save data item-by-item
        for obj in page:
            mapping[obj.pk][display_method] = bool(obj.pk in ids_with_role)
    return mapping
def validate_vars_type(vars_obj):
    """Assert that parsed variables are a dictionary, raising AssertionError
    (with the offending type's name) otherwise."""
    if isinstance(vars_obj, dict):
        return
    vars_type = type(vars_obj)
    if hasattr(vars_type, '__name__'):
        data_type = vars_type.__name__
    else:
        data_type = str(vars_type)
    raise AssertionError(
        _('Input type `{data_type}` is not a dictionary').format(
            data_type=data_type)
    )
def parse_yaml_or_json(vars_str, silent_failure=True):
    '''
    Attempt to parse a string of variables.
    First, with JSON parser, if that fails, then with PyYAML.
    If both attempts fail, return an empty dictionary if `silent_failure`
    is True, re-raise combination error if `silent_failure` if False.

    Dicts pass through unchanged; the literal string '""' parses to {}.
    '''
    if vars_str is None:
        pass
    if isinstance(vars_str, dict):
        return vars_str
    elif isinstance(vars_str, six.string_types) and vars_str == '""':
        return {}
    try:
        vars_dict = json.loads(vars_str)
        validate_vars_type(vars_dict)
    except (ValueError, TypeError, AssertionError) as json_err:
        try:
            vars_dict = yaml.safe_load(vars_str)
            # Can be None if '---'
            if vars_dict is None:
                vars_dict = {}
            validate_vars_type(vars_dict)
            if not silent_failure:
                # is valid YAML, check that it is compatible with JSON
                try:
                    json.dumps(vars_dict)
                except (ValueError, TypeError, AssertionError) as json_err2:
                    raise ParseError(_(
                        'Variables not compatible with JSON standard (error: {json_error})').format(
                            json_error=str(json_err2)))
        except (yaml.YAMLError, TypeError, AttributeError, AssertionError) as yaml_err:
            # Neither parser accepted the input.
            if silent_failure:
                return {}
            raise ParseError(_(
                'Cannot parse as JSON (error: {json_error}) or '
                'YAML (error: {yaml_error}).').format(
                    json_error=str(json_err), yaml_error=str(yaml_err)))
    return vars_dict
def get_cpu_capacity():
    # Return (cpu_count, fork_capacity) derived from CPU count, honoring
    # environment/settings overrides. An absolute override returns
    # (0, override) directly. Env vars take precedence over settings.
    from django.conf import settings
    settings_forkcpu = getattr(settings, 'SYSTEM_TASK_FORKS_CPU', None)
    env_forkcpu = os.getenv('SYSTEM_TASK_FORKS_CPU', None)
    settings_abscpu = getattr(settings, 'SYSTEM_TASK_ABS_CPU', None)
    env_abscpu = os.getenv('SYSTEM_TASK_ABS_CPU', None)
    if env_abscpu is not None:
        return 0, int(env_abscpu)
    elif settings_abscpu is not None:
        return 0, int(settings_abscpu)
    cpu = psutil.cpu_count()
    if env_forkcpu:
        forkcpu = int(env_forkcpu)
    elif settings_forkcpu:
        forkcpu = int(settings_forkcpu)
    else:
        # Default: 4 forks per CPU.
        forkcpu = 4
    return (cpu, cpu * forkcpu)
def get_mem_capacity():
    # Return (total_mem_bytes, fork_capacity) derived from system memory,
    # honoring environment/settings overrides. An absolute override returns
    # (0, override) directly. Env vars take precedence over settings.
    from django.conf import settings
    settings_forkmem = getattr(settings, 'SYSTEM_TASK_FORKS_MEM', None)
    env_forkmem = os.getenv('SYSTEM_TASK_FORKS_MEM', None)
    settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
    env_absmem = os.getenv('SYSTEM_TASK_ABS_MEM', None)
    if env_absmem is not None:
        return 0, int(env_absmem)
    elif settings_absmem is not None:
        return 0, int(settings_absmem)
    if env_forkmem:
        forkmem = int(env_forkmem)
    elif settings_forkmem:
        forkmem = int(settings_forkmem)
    else:
        # Default: assume each fork needs ~100 MB.
        forkmem = 100
    mem = psutil.virtual_memory().total
    # Reserve 2 GB for the system; divide the rest by per-fork memory.
    # NOTE(review): `/` is integer division on Python 2; would yield a float
    # under Python 3.
    return (mem, max(1, ((mem / 1024 / 1024) - 2048) / forkmem))
def get_system_task_capacity(scale=Decimal(1.0), cpu_capacity=None, mem_capacity=None):
    '''
    Measure system memory and use it as a baseline for determining the system's capacity

    `scale` (0..1) controls how much of the gap between the smaller and
    larger of the CPU/memory limits is granted on top of the smaller one.
    '''
    from django.conf import settings
    settings_forks = getattr(settings, 'SYSTEM_TASK_FORKS_CAPACITY', None)
    env_forks = os.getenv('SYSTEM_TASK_FORKS_CAPACITY', None)
    # An explicit capacity override wins outright; env beats settings.
    if env_forks:
        return int(env_forks)
    elif settings_forks:
        return int(settings_forks)
    if cpu_capacity is None:
        _, cpu_cap = get_cpu_capacity()
    else:
        cpu_cap = cpu_capacity
    if mem_capacity is None:
        _, mem_cap = get_mem_capacity()
    else:
        mem_cap = mem_capacity
    return min(mem_cap, cpu_cap) + ((max(mem_cap, cpu_cap) - min(mem_cap, cpu_cap)) * scale)
# Thread-local state used by the ignore_inventory_* and task-manager
# scheduling context managers below.
_inventory_updates = threading.local()
_task_manager = threading.local()
@contextlib.contextmanager
def ignore_inventory_computed_fields():
    '''
    Context manager to ignore updating inventory computed fields.
    '''
    try:
        # Save the previous flag so nested uses restore correctly.
        previous_value = getattr(_inventory_updates, 'is_updating', False)
        _inventory_updates.is_updating = True
        yield
    finally:
        _inventory_updates.is_updating = previous_value
def _schedule_task_manager():
    # Submit the task manager to run after the current transaction commits.
    from awx.main.scheduler.tasks import run_task_manager
    from django.db import connection
    # runs right away if not in transaction
    connection.on_commit(lambda: run_task_manager.delay())
@contextlib.contextmanager
def task_manager_bulk_reschedule():
    """Context manager to avoid submitting task multiple times.

    While active, schedule_task_manager() only sets a flag; a single
    submission happens on exit if anything requested one.
    """
    try:
        # Save previous thread-local state so nested uses restore correctly.
        previous_flag = getattr(_task_manager, 'bulk_reschedule', False)
        previous_value = getattr(_task_manager, 'needs_scheduling', False)
        _task_manager.bulk_reschedule = True
        _task_manager.needs_scheduling = False
        yield
    finally:
        _task_manager.bulk_reschedule = previous_flag
        if _task_manager.needs_scheduling:
            _schedule_task_manager()
        _task_manager.needs_scheduling = previous_value
def schedule_task_manager():
    # Inside task_manager_bulk_reschedule(), just record that a run is
    # needed; otherwise submit immediately.
    if getattr(_task_manager, 'bulk_reschedule', False):
        _task_manager.needs_scheduling = True
        return
    _schedule_task_manager()
@contextlib.contextmanager
def ignore_inventory_group_removal():
    '''
    Context manager to ignore moving groups/hosts when group is deleted.
    '''
    try:
        # Save the previous flag so nested uses restore correctly.
        previous_value = getattr(_inventory_updates, 'is_removing', False)
        _inventory_updates.is_removing = True
        yield
    finally:
        _inventory_updates.is_removing = previous_value
@contextlib.contextmanager
def set_environ(**environ):
    '''
    Temporarily set the process environment variables.
    >>> with set_environ(FOO='BAR'):
    ...     assert os.environ['FOO'] == 'BAR'
    '''
    saved_environ = os.environ.copy()
    try:
        os.environ.update(environ)
        yield
    finally:
        # Restore the snapshot wholesale, dropping anything added meanwhile.
        os.environ.clear()
        os.environ.update(saved_environ)
@memoize()
def check_proot_installed():
    '''
    Check that proot is installed.
    '''
    from django.conf import settings
    cmd = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--version']
    try:
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        proc.communicate()
        return bool(proc.returncode == 0)
    except (OSError, ValueError) as e:
        # A plain "command not found" (ENOENT) is expected; anything else
        # is worth logging before reporting unavailable.
        if isinstance(e, ValueError) or getattr(e, 'errno', 1) != 2:  # ENOENT, no such file or directory
            logger.exception('bwrap unavailable for unexpected reason.')
        return False
def build_proot_temp_dir():
    '''
    Create a temporary directory for proot to use.
    '''
    from django.conf import settings
    path = tempfile.mkdtemp(prefix='awx_proot_', dir=settings.AWX_PROOT_BASE_PATH)
    # Owner-only access (0700); this dir will hold bind-mount stand-ins.
    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    return path
def wrap_args_with_proot(args, cwd, **kwargs):
    '''
    Wrap existing command line with proot to restrict access to:
     - AWX_PROOT_BASE_PATH (generally, /tmp) (except for own /tmp files)
    For non-isolated nodes:
     - /etc/tower (to prevent obtaining db info or secret key)
     - /var/lib/awx (except for current project)
     - /var/log/tower
     - /var/log/supervisor

    Returns the new argument list with the bwrap prefix, hidden paths
    masked by empty stand-ins, show paths re-bound, and a --chdir chosen
    per run type.
    '''
    from django.conf import settings
    cwd = os.path.realpath(cwd)
    new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc']
    hide_paths = [settings.AWX_PROOT_BASE_PATH]
    if not kwargs.get('isolated'):
        hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log',
                           settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
    hide_paths.extend(getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [])
    # Mask each hidden path by binding an empty temp file/dir over it.
    for path in sorted(set(hide_paths)):
        if not os.path.exists(path):
            continue
        path = os.path.realpath(path)
        if os.path.isdir(path):
            new_path = tempfile.mkdtemp(dir=kwargs['proot_temp_dir'])
            os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        else:
            handle, new_path = tempfile.mkstemp(dir=kwargs['proot_temp_dir'])
            os.close(handle)
            os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR)
        new_args.extend(['--bind', '%s' %(new_path,), '%s' % (path,)])
    # Paths the job legitimately needs read/write access to.
    if kwargs.get('isolated'):
        show_paths = [kwargs['private_data_dir']]
    elif 'private_data_dir' in kwargs:
        show_paths = [cwd, kwargs['private_data_dir']]
    else:
        show_paths = [cwd]
    # Virtualenvs are re-exposed read-only.
    for venv in (
        settings.ANSIBLE_VENV_PATH,
        settings.AWX_VENV_PATH,
        kwargs.get('proot_custom_virtualenv')
    ):
        if venv:
            new_args.extend(['--ro-bind', venv, venv])
    show_paths.extend(getattr(settings, 'AWX_PROOT_SHOW_PATHS', None) or [])
    show_paths.extend(kwargs.get('proot_show_paths', []))
    for path in sorted(set(show_paths)):
        if not os.path.exists(path):
            continue
        path = os.path.realpath(path)
        new_args.extend(['--bind', '%s' % (path,), '%s' % (path,)])
    if kwargs.get('isolated'):
        if 'ansible-playbook' in args:
            # playbook runs should cwd to the SCM checkout dir
            new_args.extend(['--chdir', os.path.join(kwargs['private_data_dir'], 'project')])
        else:
            # ad-hoc runs should cwd to the root of the private data dir
            new_args.extend(['--chdir', kwargs['private_data_dir']])
    else:
        new_args.extend(['--chdir', cwd])
    new_args.extend(args)
    return new_args
def get_pk_from_dict(_dict, key):
    '''
    Helper for obtaining a pk from user data dict or None if not present.
    Accepts either a model-like object (returns its .id) or anything
    coercible to int; returns None for missing keys or bad values.
    '''
    try:
        candidate = _dict[key]
    except (TypeError, KeyError):
        # _dict is not a mapping, or key is absent.
        return None
    if hasattr(candidate, 'id'):
        # return id if given model object
        return candidate.id
    try:
        return int(candidate)
    except (TypeError, ValueError):
        return None
def timestamp_apiformat(timestamp):
    """Render a datetime in ISO 8601, replacing a '+00:00' UTC offset with 'Z'."""
    rendered = timestamp.isoformat()
    if rendered.endswith('+00:00'):
        return rendered[:-6] + 'Z'
    return rendered
# Backport of timedelta.total_seconds() (missing on Python 2.6).
def timedelta_total_seconds(timedelta):
    """Return the duration of *timedelta* in (fractional) seconds."""
    whole_seconds = timedelta.seconds + timedelta.days * 24 * 3600
    total_microseconds = timedelta.microseconds + 0.0 + whole_seconds * 10 ** 6
    return total_microseconds / 10 ** 6
class NoDefaultProvided(object):
    # Sentinel used by getattrd() to distinguish "no default supplied".
    pass
def getattrd(obj, name, default=NoDefaultProvided):
    """
    Same as getattr(), but allows dot notation lookup
    Discussed in:
    http://stackoverflow.com/questions/11975781
    """
    try:
        target = obj
        # Walk the attribute chain one segment at a time.
        for attr_name in name.split("."):
            target = getattr(target, attr_name)
        return target
    except AttributeError:
        if default != NoDefaultProvided:
            return default
        raise
def getattr_dne(obj, name, notfound=ObjectDoesNotExist):
    """Like getattr(), but return None when *notfound* is raised."""
    try:
        value = getattr(obj, name)
    except notfound:
        value = None
    return value
# Module-level reference to the active Django app registry.  Migration code
# swaps it out via set_current_apps() so lookups resolve against historical
# model state instead of the live registry.
current_apps = apps
def set_current_apps(apps):
    """Replace the module-level app registry reference."""
    global current_apps
    current_apps = apps
def get_current_apps():
    """Return the currently active app registry reference."""
    global current_apps
    return current_apps
def get_custom_venv_choices():
    """Return trailing-slash paths of custom virtualenvs under
    settings.BASE_VENV_PATH, excluding the stock 'awx' environment.

    A directory counts as a virtualenv only if it contains bin/activate.
    """
    from django.conf import settings
    base_path = settings.BASE_VENV_PATH
    if not os.path.exists(base_path):
        return []
    choices = []
    for entry in os.listdir(base_path):
        if entry == 'awx':
            continue
        full_path = os.path.join(base_path, entry)
        if not os.path.isdir(full_path):
            continue
        if os.path.exists(os.path.join(full_path, 'bin', 'activate')):
            choices.append(os.path.join(base_path, entry.decode('utf-8'), ''))
    return choices
class OutputEventFilter(object):
    '''
    File-like object that looks for encoded job events in stdout data.
    '''
    # Matches one base64 event payload bracketed by ANSI "erase line"
    # (\x1b[K) markers; the \x1b[<n>D cursor-left sequences split the payload
    # so it stays invisible on a real terminal.
    EVENT_DATA_RE = re.compile(r'\x1b\[K((?:[A-Za-z0-9+/=]+\x1b\[\d+D)+)\x1b\[K')
    def __init__(self, event_callback):
        # Invoked once per decoded event dict, and once more with an EOF
        # event from close().
        self._event_callback = event_callback
        # Monotonic counter assigned to every emitted event.
        self._counter = 0
        # stdout line number where the next emitted event starts.
        self._start_line = 0
        self._buffer = StringIO()
        # Most recent chunk written; used to detect tokens spanning writes.
        self._last_chunk = ''
        # Event data waiting for the stdout that follows it.
        self._current_event_data = None
    def flush(self):
        # pexpect wants to flush the file it writes to, but we're not
        # actually capturing stdout to a raw file; we're just
        # implementing a custom `write` method to discover and emit events from
        # the stdout stream
        pass
    def write(self, data):
        """Accumulate *data* and emit any complete encoded events found."""
        self._buffer.write(data)
        # keep a sliding window of the last chunk written so we can detect
        # event tokens and determine if we need to perform a search of the full
        # buffer
        should_search = '\x1b[K' in (self._last_chunk + data)
        self._last_chunk = data
        # Only bother searching the buffer if we recently saw a start/end
        # token (\x1b[K)
        while should_search:
            value = self._buffer.getvalue()
            match = self.EVENT_DATA_RE.search(value)
            if not match:
                break
            try:
                base64_data = re.sub(r'\x1b\[\d+D', '', match.group(1))
                event_data = json.loads(base64.b64decode(base64_data))
            except ValueError:
                # Corrupt payload: still emit the stdout, with no metadata.
                event_data = {}
            # Everything preceding the match belongs to the pending event.
            self._emit_event(value[:match.start()], event_data)
            remainder = value[match.end():]
            self._buffer = StringIO()
            self._buffer.write(remainder)
            self._last_chunk = remainder
    def close(self):
        """Flush trailing stdout and send the final EOF event."""
        value = self._buffer.getvalue()
        if value:
            self._emit_event(value)
            self._buffer = StringIO()
        self._event_callback(dict(event='EOF', final_counter=self._counter))
    def _emit_event(self, buffered_stdout, next_event_data=None):
        # Attach *buffered_stdout* to the pending event if one exists,
        # otherwise emit it line-by-line as 'verbose' events; then remember
        # *next_event_data* (when it has a uuid) for the stdout that follows.
        next_event_data = next_event_data or {}
        if self._current_event_data:
            event_data = self._current_event_data
            stdout_chunks = [buffered_stdout]
        elif buffered_stdout:
            event_data = dict(event='verbose')
            stdout_chunks = buffered_stdout.splitlines(True)
        else:
            stdout_chunks = []
        for stdout_chunk in stdout_chunks:
            self._counter += 1
            event_data['counter'] = self._counter
            # Drop the trailing line terminator (assumes '\r\n' pairs —
            # TODO confirm callers always produce CRLF-terminated chunks).
            event_data['stdout'] = stdout_chunk[:-2] if len(stdout_chunk) > 2 else ""
            n_lines = stdout_chunk.count('\n')
            event_data['start_line'] = self._start_line
            event_data['end_line'] = self._start_line + n_lines
            self._start_line += n_lines
            if self._event_callback:
                self._event_callback(event_data)
        if next_event_data.get('uuid', None):
            self._current_event_data = next_event_data
        else:
            self._current_event_data = None
class OutputVerboseFilter(OutputEventFilter):
    '''
    File-like object that dispatches stdout data.
    Does not search for encoded job event data.
    Use for unified job types that do not encode job event data.
    '''
    def write(self, data):
        """Buffer *data*, emitting one event per complete line."""
        self._buffer.write(data)
        # Nothing to dispatch until a line terminator arrives.
        if not data or '\n' not in data:
            return
        complete_lines = self._buffer.getvalue().splitlines(True)  # keep ends
        partial_line = None
        # Hold back a trailing unterminated fragment, if any.
        if '\n' not in complete_lines[-1]:
            partial_line = complete_lines.pop()
        for complete_line in complete_lines:
            self._emit_event(complete_line)
        self._buffer = StringIO()
        if partial_line:
            self._buffer.write(partial_line)
def is_ansible_variable(key):
    """Return True if *key* carries the reserved ``ansible_`` prefix."""
    prefix = 'ansible_'
    return key[:len(prefix)] == prefix
def extract_ansible_vars(extra_vars):
    """Split reserved ``ansible_*`` keys out of *extra_vars*.

    Returns a tuple ``(extra_vars, ansible_vars)`` where *extra_vars* has the
    reserved keys removed and *ansible_vars* is the set of removed key names.
    """
    extra_vars = parse_yaml_or_json(extra_vars)
    ansible_vars = set([])
    # Iterate over a snapshot of the keys: popping while iterating the live
    # dict view raises RuntimeError on Python 3.
    for key in list(extra_vars.keys()):
        if is_ansible_variable(key):
            extra_vars.pop(key)
            ansible_vars.add(key)
    return (extra_vars, ansible_vars)
def get_search_fields(model):
    """Return the subset of *model*'s field names that are searchable."""
    searchable = ('username', 'first_name', 'last_name', 'email',
                  'name', 'description')
    return [field.name for field in model._meta.fields
            if field.name in searchable]
def has_model_field_prefetched(model_obj, field_name):
    # NOTE: Update this function if django internal implementation changes.
    """Return True if *field_name*'s related manager has been prefetched
    into the object's ``_prefetched_objects_cache``."""
    related = getattr(model_obj, field_name, None)
    cache_name = getattr(related, 'prefetch_cache_name', '')
    prefetched = getattr(model_obj, '_prefetched_objects_cache', {})
    return cache_name in prefetched
def get_external_account(user):
    """Return the external auth source that manages *user*: 'ldap',
    'social', 'enterprise', or None for a locally-managed account.

    Later checks override earlier ones, so e.g. an enterprise match wins
    over ldap/social.
    """
    from django.conf import settings
    from awx.conf.license import feature_enabled
    account_type = None
    if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
        try:
            # An LDAP user has a DN on its profile and no usable local
            # password.
            if user.pk and user.profile.ldap_dn and not user.has_usable_password():
                account_type = "ldap"
        except AttributeError:
            # User without a profile; fall through with account_type=None.
            pass
    # Any configured social-auth provider plus an existing social_auth row
    # marks the account as social.
    if (getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or
            getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) or
            getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) or
            getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
            getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and user.social_auth.all():
        account_type = "social"
    # RADIUS/TACACS+ backends mark the account as enterprise.
    if (getattr(settings, 'RADIUS_SERVER', None) or
            getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
        account_type = "enterprise"
    return account_type
| wwitzel3/awx | awx/main/utils/common.py | Python | apache-2.0 | 41,179 |
from django import template
from datetime import date, timedelta
# Filter registry Django discovers for this template tag library.
register = template.Library()


@register.filter
def get_item(dictionary, key):
    """Template filter: look up *key* in *dictionary*, defaulting to 0.

    ``dict.get(key, 0)`` replaces the redundant
    ``get(key) if key in dictionary else 0`` double lookup.
    """
    return dictionary.get(key, 0)
| sasasaftic/owatam | analysis/templatetags/app_filters.py | Python | mit | 206 |
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from google.cloud._testing import _GAXBaseAPI
# Shared fixture data used by the mutation-building tests below.
TABLE_NAME = 'citizens'
COLUMNS = ['email', 'first_name', 'last_name', 'age']
VALUES = [
    ['[email protected]', 'Phred', 'Phlyntstone', 32],
    ['[email protected]', 'Bharney', 'Rhubble', 31],
]
class TestTransaction(unittest.TestCase):
    """Unit tests for google.cloud.spanner.transaction.Transaction,
    driven through the _Session/_Database/_FauxSpannerAPI stubs below."""
    # Fully-qualified resource names used throughout the tests.
    PROJECT_ID = 'project-id'
    INSTANCE_ID = 'instance-id'
    INSTANCE_NAME = 'projects/' + PROJECT_ID + '/instances/' + INSTANCE_ID
    DATABASE_ID = 'database-id'
    DATABASE_NAME = INSTANCE_NAME + '/databases/' + DATABASE_ID
    SESSION_ID = 'session-id'
    SESSION_NAME = DATABASE_NAME + '/sessions/' + SESSION_ID
    TRANSACTION_ID = b'DEADBEEF'
    # Factory helpers: import lazily so collection errors surface per-test.
    def _getTargetClass(self):
        from google.cloud.spanner.transaction import Transaction
        return Transaction
    def _make_one(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)
    def test_ctor_defaults(self):
        session = _Session()
        transaction = self._make_one(session)
        self.assertTrue(transaction._session is session)
        self.assertIsNone(transaction._id)
        self.assertIsNone(transaction.committed)
        self.assertEqual(transaction._rolled_back, False)
    # _check_state: must raise unless begun and neither committed nor
    # rolled back.
    def test__check_state_not_begun(self):
        session = _Session()
        transaction = self._make_one(session)
        with self.assertRaises(ValueError):
            transaction._check_state()
    def test__check_state_already_committed(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = b'DEADBEEF'
        transaction.committed = object()
        with self.assertRaises(ValueError):
            transaction._check_state()
    def test__check_state_already_rolled_back(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = b'DEADBEEF'
        transaction._rolled_back = True
        with self.assertRaises(ValueError):
            transaction._check_state()
    def test__check_state_ok(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = b'DEADBEEF'
        transaction._check_state()  # does not raise
    def test__make_txn_selector(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        selector = transaction._make_txn_selector()
        self.assertEqual(selector.id, self.TRANSACTION_ID)
    # begin(): state guards, API error propagation, and the happy path.
    def test_begin_already_begun(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        with self.assertRaises(ValueError):
            transaction.begin()
    def test_begin_already_rolled_back(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._rolled_back = True
        with self.assertRaises(ValueError):
            transaction.begin()
    def test_begin_already_committed(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction.committed = object()
        with self.assertRaises(ValueError):
            transaction.begin()
    def test_begin_w_gax_error(self):
        from google.gax.errors import GaxError
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _random_gax_error=True)
        session = _Session(database)
        transaction = self._make_one(session)
        with self.assertRaises(GaxError):
            transaction.begin()
        # Even on failure the API must have been called with the session
        # name, read-write options, and the resource-prefix metadata.
        session_id, txn_options, options = api._begun
        self.assertEqual(session_id, session.name)
        self.assertTrue(txn_options.HasField('read_write'))
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    def test_begin_ok(self):
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            Transaction as TransactionPB)
        transaction_pb = TransactionPB(id=self.TRANSACTION_ID)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _begin_transaction_response=transaction_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        txn_id = transaction.begin()
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(transaction._id, self.TRANSACTION_ID)
        session_id, txn_options, options = api._begun
        self.assertEqual(session_id, session.name)
        self.assertTrue(txn_options.HasField('read_write'))
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    # rollback(): state guards, API error propagation, and the happy path.
    def test_rollback_not_begun(self):
        session = _Session()
        transaction = self._make_one(session)
        with self.assertRaises(ValueError):
            transaction.rollback()
    def test_rollback_already_committed(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.committed = object()
        with self.assertRaises(ValueError):
            transaction.rollback()
    def test_rollback_already_rolled_back(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction._rolled_back = True
        with self.assertRaises(ValueError):
            transaction.rollback()
    def test_rollback_w_gax_error(self):
        from google.gax.errors import GaxError
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _random_gax_error=True)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.insert(TABLE_NAME, COLUMNS, VALUES)
        with self.assertRaises(GaxError):
            transaction.rollback()
        # A failed rollback must not mark the transaction rolled back.
        self.assertFalse(transaction._rolled_back)
        session_id, txn_id, options = api._rolled_back
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    def test_rollback_ok(self):
        from google.protobuf.empty_pb2 import Empty
        empty_pb = Empty()
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _rollback_response=empty_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.replace(TABLE_NAME, COLUMNS, VALUES)
        transaction.rollback()
        self.assertTrue(transaction._rolled_back)
        session_id, txn_id, options = api._rolled_back
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    # commit(): state guards, empty-mutation guard, error propagation,
    # and the happy path.
    def test_commit_not_begun(self):
        session = _Session()
        transaction = self._make_one(session)
        with self.assertRaises(ValueError):
            transaction.commit()
    def test_commit_already_committed(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.committed = object()
        with self.assertRaises(ValueError):
            transaction.commit()
    def test_commit_already_rolled_back(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction._rolled_back = True
        with self.assertRaises(ValueError):
            transaction.commit()
    def test_commit_no_mutations(self):
        session = _Session()
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        with self.assertRaises(ValueError):
            transaction.commit()
    def test_commit_w_gax_error(self):
        from google.gax.errors import GaxError
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _random_gax_error=True)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.replace(TABLE_NAME, COLUMNS, VALUES)
        with self.assertRaises(GaxError):
            transaction.commit()
        # A failed commit leaves the committed timestamp unset.
        self.assertIsNone(transaction.committed)
        session_id, mutations, txn_id, options = api._committed
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    def test_commit_ok(self):
        import datetime
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.spanner.keyset import KeySet
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        keys = [[0], [1], [2]]
        keyset = KeySet(keys=keys)
        response = CommitResponse(commit_timestamp=now_pb)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=response)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._id = self.TRANSACTION_ID
        transaction.delete(TABLE_NAME, keyset)
        transaction.commit()
        self.assertEqual(transaction.committed, now)
        session_id, mutations, txn_id, options = api._committed
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    # Context manager protocol: commit on clean exit, rollback on error.
    def test_context_mgr_success(self):
        import datetime
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        transaction_pb = TransactionPB(id=self.TRANSACTION_ID)
        database = _Database()
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _begin_transaction_response=transaction_pb,
            _commit_response=response)
        session = _Session(database)
        transaction = self._make_one(session)
        with transaction:
            transaction.insert(TABLE_NAME, COLUMNS, VALUES)
        self.assertEqual(transaction.committed, now)
        session_id, mutations, txn_id, options = api._committed
        self.assertEqual(session_id, self.SESSION_NAME)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    def test_context_mgr_failure(self):
        from google.protobuf.empty_pb2 import Empty
        empty_pb = Empty()
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            Transaction as TransactionPB)
        transaction_pb = TransactionPB(id=self.TRANSACTION_ID)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _begin_transaction_response=transaction_pb,
            _rollback_response=empty_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        with self.assertRaises(Exception):
            with transaction:
                transaction.insert(TABLE_NAME, COLUMNS, VALUES)
                raise Exception("bail out")
        # Rolled back, never committed; buffered mutations remain.
        self.assertEqual(transaction.committed, None)
        self.assertTrue(transaction._rolled_back)
        self.assertEqual(len(transaction._mutations), 1)
        self.assertEqual(api._committed, None)
        session_id, txn_id, options = api._rolled_back
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
class _Database(object):
    """Stub database: tests only consult its name and spanner_api attr."""
    name = 'testing'
class _Session(object):
    """Stub session carrying a database reference and a resource name."""
    def __init__(self, database=None, name=TestTransaction.SESSION_NAME):
        self._database = database
        self.name = name
class _FauxSpannerAPI(_GAXBaseAPI):
    """Fake gapic API: records call arguments and replays canned responses
    (or raises GaxError when constructed with _random_gax_error=True)."""
    _committed = None
    def begin_transaction(self, session, options_, options=None):
        from google.gax.errors import GaxError
        # Record args so tests can assert on them even after an error.
        self._begun = (session, options_, options)
        if self._random_gax_error:
            raise GaxError('error')
        return self._begin_transaction_response
    def rollback(self, session, transaction_id, options=None):
        from google.gax.errors import GaxError
        self._rolled_back = (session, transaction_id, options)
        if self._random_gax_error:
            raise GaxError('error')
        return self._rollback_response
    def commit(self, session, mutations,
               transaction_id='', single_use_transaction=None, options=None):
        from google.gax.errors import GaxError
        # Transaction.commit() should never request single-use semantics.
        assert single_use_transaction is None
        self._committed = (session, mutations, transaction_id, options)
        if self._random_gax_error:
            raise GaxError('error')
        return self._commit_response
| daspecster/google-cloud-python | spanner/unit_tests/test_transaction.py | Python | apache-2.0 | 14,733 |
"""
Copyright 2020 Google LLC
Copyright 2020 PerfectVIPs Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import math
import logging
import vsc
from importlib import import_module
from pygen_src.isa.riscv_instr import riscv_instr
from pygen_src.riscv_instr_pkg import (pkg_ins, riscv_instr_category_t, riscv_reg_t,
riscv_instr_name_t, riscv_instr_group_t,
riscv_instr_format_t)
from pygen_src.riscv_instr_gen_config import cfg
# Resolve the core-settings module (XLEN etc.) for the target selected on
# the command line.
rcs = import_module("pygen_src.target." + cfg.argv.target + ".riscv_core_setting")
@vsc.randobj
class riscv_b_instr(riscv_instr):
    """Randomizable instruction model for the RISC-V Bitmanip (B) extension.

    Adds the third source register (rs3) needed by ternary instructions and
    B-specific operand/immediate handling on top of riscv_instr.
    """
    def __init__(self):
        super().__init__()
        # Third source register, used only by R4-format / funnel-shift ops.
        self.rs3 = vsc.rand_enum_t(riscv_reg_t)
        self.has_rs3 = vsc.bit_t(1)

    def set_rand_mode(self):
        """Enable/disable operand fields according to the instruction format."""
        super().set_rand_mode()
        self.has_rs3 = 0
        if self.format == riscv_instr_format_t.R_FORMAT:
            # Unary R-format instructions consume rs1 only.
            if self.instr_name in [riscv_instr_name_t.CLZW,
                                   riscv_instr_name_t.CTZW, riscv_instr_name_t.PCNTW,
                                   riscv_instr_name_t.SEXT_B, riscv_instr_name_t.SEXT_H,
                                   riscv_instr_name_t.CLZ, riscv_instr_name_t.CTZ,
                                   riscv_instr_name_t.PCNT, riscv_instr_name_t.BMATFLIP,
                                   riscv_instr_name_t.CRC32_B, riscv_instr_name_t.CRC32_H,
                                   riscv_instr_name_t.CRC32_W, riscv_instr_name_t.CRC32C_B,
                                   riscv_instr_name_t.CRC32C_H, riscv_instr_name_t.CRC32C_W,
                                   riscv_instr_name_t.CRC32_D, riscv_instr_name_t.CRC32C_D]:
                self.has_rs2 = 0
        elif self.format == riscv_instr_format_t.R4_FORMAT:
            # Ternary ops: rd, rs1, rs2, rs3 and no immediate.
            self.has_imm = 0
            self.has_rs3 = 1
        elif self.format == riscv_instr_format_t.I_FORMAT:
            self.has_rs2 = 0
            # Funnel-shift-immediate ops still need rs3.
            if self.instr_name in [riscv_instr_name_t.FSRI, riscv_instr_name_t.FSRIW]:
                self.has_rs3 = 1

    def pre_randomize(self):
        super().pre_randomize()
        with vsc.raw_mode():
            # Only randomize rs3 when the instruction actually uses it.
            self.rs3.rand_mode = bool(self.has_rs3)

    def set_imm_len(self):
        """Size the immediate field for shift/logical and shuffle ops."""
        if self.format == riscv_instr_format_t.I_FORMAT:
            if self.category in [riscv_instr_category_t.SHIFT, riscv_instr_category_t.LOGICAL]:
                # BUG FIX: the original compared the *string* self.group.name
                # against the enum member riscv_instr_group_t.RV64B, which is
                # never equal, so RV64B shifts always got the full-width
                # immediate. Compare name-to-name instead.
                if (self.group.name == riscv_instr_group_t.RV64B.name and
                        self.instr_name != riscv_instr_name_t.SLLIU_W):
                    self.imm_len = math.ceil(math.log2(rcs.XLEN)) - 1
                else:
                    self.imm_len = math.ceil(math.log2(rcs.XLEN))
            # ARITHMETIC RV32B
            if self.instr_name in [riscv_instr_name_t.SHFLI, riscv_instr_name_t.UNSHFLI]:
                self.imm_len = math.ceil(math.log2(rcs.XLEN)) - 1
            # ARITHMETIC RV64B
            if self.instr_name == riscv_instr_name_t.ADDIWU:
                self.imm_len = 12
        self.imm_mask = self.imm_mask << self.imm_len

    # Convert the instruction to assembly code
    def convert2asm(self, prefix=" "):
        """Render this instruction as lower-case assembly text; falls back
        to the base-class renderer for formats not special-cased here."""
        asm_str_final = ""
        asm_str = pkg_ins.format_string(self.get_instr_name(), pkg_ins.MAX_INSTR_STR_LEN)
        if self.format == riscv_instr_format_t.I_FORMAT:
            if self.instr_name in [riscv_instr_name_t.FSRI,
                                   riscv_instr_name_t.FSRIW]:  # instr rd, rs1, rs3, imm
                asm_str_final = "{}{}, {}, {}, {}".format(asm_str, self.rd.name, self.rs1.name,
                                                          self.rs3.name, self.get_imm())
        elif self.format == riscv_instr_format_t.R_FORMAT:  # instr rd, rs1
            if not self.has_rs2:
                asm_str_final = "{}{}, {}".format(asm_str, self.rd.name, self.rs1.name)
        elif self.format == riscv_instr_format_t.R4_FORMAT:  # instr rd, rs1, rs2, rs3
            asm_str_final = "{}{}, {}, {}, {}".format(asm_str, self.rd.name, self.rs1.name,
                                                      self.rs2.name, self.rs3.name)
        else:
            logging.info("Unsupported format {}".format(self.format))
        if asm_str_final == "":
            return super().convert2asm(prefix)
        if self.comment != "":
            asm_str_final = asm_str_final + " #" + self.comment
        return asm_str_final.lower()

    def get_opcode(self):
        # TODO
        pass

    def get_func3(self):
        # TODO
        pass

    def get_func5(self):
        # TODO
        pass

    def get_func2(self):
        # TODO
        pass

    # Convert the instruction to assembly code
    def convert2bin(self, prefix):
        pass

    def is_supported(self, cfg):
        """Return True when the B extension is enabled and this instruction's
        bitmanip sub-group is selected in cfg.enable_bitmanip_groups."""
        return (cfg.enable_b_extension and
                ("ZBB" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["CLZ", "CTZ", "CLZW", "CTZW", "PCNT", "PCNTW",
                  "SLO", "SLOI", "SLOW", "SLOIW",
                  "SRO", "SROI", "SROW", "SROIW",
                  "MIN", "MINU", "MAX", "MAXU",
                  "ADDWU", "ADDIWU", "SUBWU",
                  "ADDU_W", "SUBU_W",
                  "SLLIU_W",
                  "ANDN", "ORN",
                  "XNOR", "PACK", "PACKW", "PACKU", "PACKUW", "PACKH",
                  "ROL", "ROLW", "ROR", "RORW", "RORI", "RORIW"
                  ]) or
                ("ZBS" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["SBSET", "SBSETW", "SBSETI", "SBSETIW",
                  "SBCLR", "SBCLRW", "SBCLRI", "SBCLRIW",
                  "SBINV", "SBINVW", "SBINVI", "SBINVIW",
                  "SBEXT", "SBEXTW", "SBEXTI"
                  ]) or
                ("ZBP" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["GREV", "GREVW", "GREVI", "GREVIW",
                  "GORC", "GORCW", "GORCI", "GORCIW",
                  "SHFL", "SHFLW", "UNSHFL", "UNSHFLW", "SHFLI", "UNSHFLI"
                  ]) or
                # BUG FIX: the ZBE branch compared the enum member itself
                # against strings (always False, so BEXT/BDEP were never
                # reported as supported); use .name like every other group.
                ("ZBE" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["BEXT", "BEXTW",
                  "BDEP", "BDEPW"
                  ]) or
                ("ZBF" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["BFP", "BFPW"
                  ]) or
                ("ZBC" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["CLMUL", "CLMULW", "CLMULH", "CLMULHW", "CLMULR", "CLMULRW"
                  ]) or
                ("ZBR" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["CRC32_B", "CRC32_H", "CRC32_W", "CRC32_D",
                  "CRC32C_B", "CRC32C_H", "CRC32C_W", "CRC32C_D"
                  ]) or
                ("ZBM" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["BMATOR", "BMATXOR", "BMATFLIP"
                  ]) or
                ("ZBT" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["CMOV", "CMIX",
                  "FSL", "FSLW", "FSR", "FSRW", "FSRI", "FSRIW"
                  ]) or
                # TODO, spec 0.92 doesn't categorize these 2 instr, put them in ZB_TMP #572
                ("ZB_TMP" in cfg.enable_bitmanip_groups and self.instr_name.name in
                 ["SEXT_B", "SEXT_H"
                  ])
                )

    # Coverage related functions
    def update_src_regs(self, operands):
        # TODO
        pass
| lowRISC/ibex | vendor/google_riscv-dv/pygen/pygen_src/isa/riscv_b_instr.py | Python | apache-2.0 | 7,835 |
import pytest
from charitybot2.persistence.heartbeat_sqlite_repository import HeartbeatSQLiteRepository, NonExistentHeartbeatSource
# Module-scoped repository under test (debug mode) seeded with one heartbeat
# row that the read tests below depend on.
heartbeat_repository = HeartbeatSQLiteRepository(debug=True)
heartbeat_repository.store_heartbeat(source='test_source', state='test_state', timestamp=1)
def teardown_module():
    # pytest hook: close the SQLite connection after all tests in this
    # module have run.
    heartbeat_repository.close_connection()
class TestHeartbeatSQLiteRepository:
    """Happy-path read/write tests against the shared module fixture."""
    def test_retrieving_last_heartbeat(self):
        # Reads back the row seeded at module import time.
        last_heartbeat = heartbeat_repository.get_last_heartbeat(source='test_source')
        assert 'test_source' == last_heartbeat['source']
        assert 'test_state' == last_heartbeat['state']
        assert 1 == last_heartbeat['timestamp']
    def test_insert_heartbeat_state(self):
        heartbeat_repository.store_heartbeat(source='test_insert_source', state='test_insert_state', timestamp=2)
        new_heartbeat = heartbeat_repository.get_last_heartbeat(source='test_insert_source')
        assert 'test_insert_source' == new_heartbeat['source']
        assert 'test_insert_state' == new_heartbeat['state']
        assert 2 == new_heartbeat['timestamp']
class TestHeartbeatSQLiteRepositoryExceptions:
    """Error-path tests for HeartbeatSQLiteRepository."""
    def test_retrieving_from_non_existent_source_throws_exception(self):
        # Only the raised exception matters; the previous unused local
        # binding of the (never produced) return value has been dropped.
        with pytest.raises(NonExistentHeartbeatSource):
            heartbeat_repository.get_last_heartbeat(source='non-existent')
| purrcat259/charitybot2 | tests/integration/test_heartbeat_sqlite_repository.py | Python | gpl-3.0 | 1,394 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libbeato(AutotoolsPackage):
    """libbeato is a C library containing routines for various uses in Genomics,
    and includes a copy of the freeware portion of the C library
    from UCSC's Genome Browser Group."""

    homepage = "https://github.com/CRG-Barcelona/libbeato"
    git = "https://github.com/CRG-Barcelona/libbeato.git"

    # BUG FIX: the keyword was misspelled 'brancch', which the version()
    # directive does not accept, so the 'master' version could never be
    # fetched from git.
    version('master', branch='master')
| rspavel/spack | var/spack/repos/builtin/packages/libbeato/package.py | Python | lgpl-2.1 | 605 |
# coding=utf-8
# Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import six
from storops.vnx.resource import VNXCliResourceList
from storops.vnx.resource import VNXCliResource
from storops import exception as ex
__author__ = 'Cedric Zhuang'
class VNXSnapList(VNXCliResourceList):
def __init__(self, cli=None, res=None):
super(VNXSnapList, self).__init__(cli=cli)
self._res = res
@classmethod
def get_resource_class(cls):
return VNXSnap
def _get_raw_resource(self):
return self._cli.get_snap(res=self._res)
class VNXSnap(VNXCliResource):
def __init__(self, name=None, cli=None):
super(VNXSnap, self).__init__()
self._cli = cli
self._name = name
@classmethod
def create(cls, cli, res, name, allow_rw=None, auto_delete=None,
keep_for=None):
out = cli.create_snap(res, name, allow_rw, auto_delete, keep_for)
msg = 'failed to create snap "{}" for {}'.format(name, res)
ex.raise_if_err(out, msg, default=ex.VNXCreateSnapError)
return VNXSnap(name, cli=cli)
def _get_raw_resource(self):
return self._cli.get_snap(name=self._name, poll=self.poll)
@classmethod
def get(cls, cli, name=None):
if name is None:
ret = VNXSnapList(cli)
else:
ret = VNXSnap(name, cli)
return ret
def delete(self):
name = self._get_name()
out = self._cli.delete_snap(name, poll=self.poll)
ex.raise_if_err(out, default=ex.VNXDeleteSnapError)
def copy(self, new_name,
ignore_migration_check=False,
ignore_dedup_check=False):
name = self._get_name()
out = self._cli.copy_snap(name, new_name,
ignore_migration_check,
ignore_dedup_check, poll=self.poll)
ex.raise_if_err(out, 'failed to copy snap {}.'.format(name),
default=ex.VNXSnapError)
return VNXSnap(name=new_name, cli=self._cli)
def modify(self, new_name=None, desc=None,
auto_delete=None, allow_rw=None, keep_for=None):
name = self._get_name()
out = self._cli.modify_snap(name, new_name, desc, auto_delete,
allow_rw, keep_for, poll=self.poll)
ex.raise_if_err(out, 'failed to modify snap {}.'.format(name),
default=ex.VNXModifySnapError)
if new_name is not None:
self._name = new_name
@staticmethod
def get_name(snap):
if isinstance(snap, VNXSnap):
if snap._name is not None:
ret = snap._name
else:
ret = snap.name
elif isinstance(snap, six.string_types):
ret = snap
else:
raise ValueError('invalid snap.')
return ret
def restore(self, res_id, backup_snap=None):
"""
Restores a snapshot.
:param res_id: the LUN number of primary LUN or snapshot mount point to
be restored.
:param backup_snap: the name of a backup snapshot to be created before
restoring.
"""
name = self._get_name()
out = self._cli.restore_snap(name, res_id, backup_snap)
ex.raise_if_err(out, 'failed to restore snap {}.'.format(name),
default=ex.VNXSnapError)
| emc-openstack/storops | storops/vnx/resource/snap.py | Python | apache-2.0 | 4,028 |
import base64
import hashlib
import re
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.db.models import Q
from mozilla_django_oidc.auth import OIDCAuthenticationBackend
from mozillians.users.models import ExternalAccount
def calculate_username(email):
    """Calculate username from email address.

    Uses the sanitized local part of *email*, appending a numeric suffix
    until the name is unique; falls back to a digest of the local part when
    no unique name fits within USERNAME_MAX_LENGTH.
    """
    # NOTE: ``email`` is rebound to its local part here, so the digest
    # fallback below hashes the local part, not the full address.
    email = email.split('@')[0]
    # Replace any character outside [\w.@+-] (Django's allowed username
    # charset) with '-'.
    username = re.sub(r'[^\w.@+-]', '-', email)
    username = username[:settings.USERNAME_MAX_LENGTH]
    suggested_username = username
    count = 0
    while User.objects.filter(username=suggested_username).exists():
        count += 1
        suggested_username = '%s%d' % (username, count)

        if len(suggested_username) > settings.USERNAME_MAX_LENGTH:
            # We failed to calculate a name for you, default to a
            # email digest.
            return base64.urlsafe_b64encode(hashlib.sha1(email).digest()).rstrip('=')

    return suggested_username
class MozilliansAuthBackend(OIDCAuthenticationBackend):
    """Override OIDCAuthenticationBackend to provide custom functionality."""
    def filter_users_by_claims(self, claims):
        """Override default method to add multiple emails in an account.

        Matches the claimed email against both the primary User.email and
        any alternate-email ExternalAccount rows; when the requester is
        already authenticated, the claimed email is attached to their
        profile as an alternate email instead.
        """
        email = claims.get('email')
        request_user = self.request.user
        if not email:
            # No email claim: no possible match.
            return self.UserModel.objects.none()
        account_type = ExternalAccount.TYPE_EMAIL
        alternate_emails = ExternalAccount.objects.filter(type=account_type, identifier=email)
        primary_email_qs = Q(email=email)
        alternate_email_qs = Q(userprofile__externalaccount=alternate_emails)
        user_q = self.UserModel.objects.filter(primary_email_qs | alternate_email_qs).distinct()
        # In this case we have a registered user who is adding a secondary email
        if request_user.is_authenticated():
            if not user_q:
                # Email is unclaimed: attach it to the requester's profile.
                ExternalAccount.objects.create(type=account_type,
                                               user=request_user.userprofile,
                                               identifier=email)
            else:
                # Email belongs to some account; warn unless it is already
                # the requester's own.
                if not user_q.filter(pk=request_user.id).exists():
                    msg = u'Email {0} already exists in the database.'.format(email)
                    messages.error(self.request, msg)
            return [request_user]
        return user_q
| fxa90id/mozillians | mozillians/common/authbackend.py | Python | bsd-3-clause | 2,405 |
import tabulate
import technic.solder.cli.command
class ModpackCommand(technic.solder.cli.command.Command):
    """Wrapper 'modpack' command grouping the list/get subcommands."""
    name = 'modpack'
    command_help = 'Get information about modpacks'
    def setup(self, parser):
        # Register the concrete subcommands on a fresh subparser group.
        subparsers = parser.add_subparsers()
        self.add_subcommand(subparsers, ListModpacksCommand())
        self.add_subcommand(subparsers, GetModpackCommand())
    def run(self, client, arguments):
        pass # This is a wrapper command so it will never actually be called
    def skip_handling(self):
        # Tell the dispatcher to route execution to a subcommand instead.
        return True
class ListModpacksCommand(technic.solder.cli.command.Command):
    """Render every modpack known to the client as a slug/name table."""
    name = 'list'
    command_help = 'List all available modpacks'
    def setup(self, parser):
        # No extra arguments for listing.
        pass
    def run(self, client, arguments):
        self.output(
            '',
            tabulate.tabulate(
                [
                    [slug, name]
                    for slug, name in client.modpacks.items()
                ],
                headers = ['Slug', 'Name'],
            )
        )
class GetModpackCommand(technic.solder.cli.command.Command):
    """Show detailed information about a single modpack."""

    name = 'get'
    command_help = 'Get information about a modpack'

    def setup(self, parser):
        """Require the slug identifying the modpack."""
        parser.add_argument(
            'modpack_slug',
            type = str,
            help = 'The modpack slug',
        )

    def run(self, client, arguments):
        """Fetch the modpack record and print its key fields."""
        modpack = client.get_modpack_info(arguments.modpack_slug)
        self.success(modpack['display_name'])
        details = [
            ['Slug', modpack['name']],
            ['URL', modpack['url']],
            ['Recommended Build', modpack['recommended']],
            ['Latest Build', modpack['latest']],
            # Only the first ten builds, to keep the table short.
            ['Builds', ', '.join(modpack['builds'][:10])],
        ]
        self.output('', tabulate.tabulate(details))
| durandj/technic-solder-client | technic/solder/cli/commands/modpacks.py | Python | mit | 1,585 |
# -*- coding: utf-8 -*-
import ast
import base64
import csv
import glob
import itertools
import logging
import operator
import datetime
import hashlib
import os
import re
import simplejson
import time
import urllib
import urllib2
import urlparse
import xmlrpclib
import zlib
from xml.etree import ElementTree
from cStringIO import StringIO
import babel.messages.pofile
import werkzeug.utils
import werkzeug.wrappers
try:
import xlwt
except ImportError:
xlwt = None
import openerp
import openerp.modules.registry
from openerp.tools.translate import _
from openerp.tools import config
from .. import http
openerpweb = http
#----------------------------------------------------------
# OpenERP Web helpers
#----------------------------------------------------------
def rjsmin(script):
    """ Minify js with a clever regex.
        Taken from http://opensource.perlig.de/rjsmin
        Apache License, Version 2.0 """
    def subber(match):
        """ Substitution callback """
        groups = match.groups()
        # The first four groups are content that must survive verbatim
        # (presumably identifiers, string literals and regex literals per
        # the upstream tokenizer); the next four collapse whitespace or
        # comment runs to a single newline/space; anything else is dropped.
        return (
            groups[0] or
            groups[1] or
            groups[2] or
            groups[3] or
            (groups[4] and '\n') or
            (groups[5] and ' ') or
            (groups[6] and ' ') or
            (groups[7] and ' ') or
            ''
        )

    # Single mega-regex from the upstream rjsmin project: it tokenizes the
    # whole script in one pass.  Do not edit by hand -- it is generated
    # upstream; any change should come from a new upstream release.
    result = re.sub(
        r'([^\047"/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?'
        r'\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|'
        r'\r)[^"\\\r\n]*)*"))[^\047"/\000-\040]*)|(?:(?<=[(,=:\[!&|?{};\r\n]'
        r')(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/'
        r'))*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*'
        r'(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*'
        r'))|(?:(?<=[\000-#%-,./:-@\[-^`{-~-]return)(?:[\000-\011\013\014\01'
        r'6-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*((?:/(?![\r\n/*])[^/'
        r'\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]'
        r'*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*))|(?<=[^\000-!#%&(*,./'
        r':-@\[\\^`{|~])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/'
        r'*][^*]*\*+)*/))*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\01'
        r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#'
        r'%-\047)*,./:-@\\-^`|-~])|(?<=[^\000-#%-,./:-@\[-^`{-~-])((?:[\000-'
        r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=[^'
        r'\000-#%-,./:-@\[-^`{-~-])|(?<=\+)((?:[\000-\011\013\014\016-\040]|'
        r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<=-)((?:[\000-\011\0'
        r'13\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=-)|(?:[\0'
        r'00-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+|(?:'
        r'(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*'
        r']*\*+(?:[^/*][^*]*\*+)*/))*)+', subber, '\n%s\n' % script
    ).strip()
    return result
def db_list(req, force=False):
    """Return the database names visible for this request.

    The raw list from the ``db`` RPC service is narrowed down by the
    ``dbfilter`` server option, in which ``%h`` is replaced by the request
    host name and ``%d`` by its first dotted component.
    """
    all_dbs = req.session.proxy("db").list(force)
    host = req.httprequest.environ['HTTP_HOST'].split(':')[0]
    domain = host.split('.')[0]
    pattern = (openerp.tools.config['dbfilter']
               .replace('%h', host)
               .replace('%d', domain))
    return [db for db in all_dbs if re.match(pattern, db)]
def db_monodb_redirect(req):
    """Pick a database for the request; when database listing is disabled,
    only auto-select the first database if it is the only one."""
    match_unique_only = not config['list_db']
    return db_redirect(req, match_unique_only)
def db_redirect(req, match_first_only_if_unique):
    """Choose the database for this request.

    Returns ``(db, redirect)`` where ``db`` is the selected database name
    (or False) and ``redirect`` is a URL carrying the choice when several
    databases are available (or False).
    """
    # 1. An explicit ?db= parameter always wins, with no redirect.
    requested = req.params.get('db')
    if requested:
        return (requested, False)

    dbs = db_list(req, True)
    chosen = False

    # 2. Fall back to the database remembered in the cookie, if still listed.
    remembered = req.httprequest.cookies.get('last_used_database')
    if remembered in dbs:
        chosen = remembered

    # 3. Otherwise take the first database -- unless we were asked to do so
    #    only when that choice is unambiguous.
    if not chosen and dbs and (not match_first_only_if_unique or len(dbs) == 1):
        chosen = dbs[0]

    # Redirect to the chosen db if multiple are available.
    redirect = False
    if chosen and len(dbs) > 1:
        query = dict(urlparse.parse_qsl(req.httprequest.query_string,
                                        keep_blank_values=True))
        query['db'] = chosen
        redirect = req.httprequest.path + '?' + urllib.urlencode(query)
    return (chosen, redirect)
def db_monodb(req):
    """Return the single usable database for this request, or False."""
    db, _redirect = db_redirect(req, True)
    return db
def redirect_with_hash(req, url, code=303):
    """Redirect to *url*, preserving the location hash on old IE.

    Internet Explorer before version 10 drops the fragment on server-side
    redirects, so for those browsers a tiny JS shim page is returned
    instead of an HTTP redirect.
    """
    if req.httprequest.user_agent.browser == 'msie':
        try:
            if float(req.httprequest.user_agent.version) < 10:
                return ("<html><head><script>window.location = "
                        "'%s#' + location.hash;</script></head></html>" % url)
        except Exception:
            # Unparseable version string: fall through to a normal redirect.
            pass
    return werkzeug.utils.redirect(url, code)
def module_topological_sort(modules):
    """ Return a list of module names sorted so that their dependencies of the
    modules are listed before the module itself

    modules is a dict of {module_name: dependencies}

    :param modules: modules to sort
    :type modules: dict
    :returns: list(str)
    """
    # All modules that appear as a dependency of some other module.
    # NOTE: ``values()`` instead of the Python-2-only ``itervalues()`` so
    # the helper keeps working on Python 3; behavior is identical.
    dependencies = set(itertools.chain.from_iterable(modules.values()))

    # Depth-first post-order walk ([Tarjan 1976],
    # http://en.wikipedia.org/wiki/Topological_sorting#Algorithms):
    # each module is emitted after all of its dependencies.
    result = []
    visited = set()

    def visit(name):
        if name in visited:
            return
        visited.add(name)
        # Dependencies that are not themselves web modules cannot be
        # resolved here; skip them silently.
        if name not in modules:
            return
        for dep in modules[name]:
            visit(dep)
        result.append(name)

    # Start from the modules on which no other module depends.
    for name in modules:
        if name not in dependencies:
            visit(name)
    return result
def module_installed(req):
    """Return the installed modules that also ship web assets, sorted so
    that dependencies come before dependants.

    Only modules present in the local addons manifest (i.e. with a
    /static dir) are considered loadable.
    """
    loadable = openerpweb.addons_manifest.keys()
    # TODO The following code should move to ir.module.module.list_installed_modules()
    Modules = req.session.model('ir.module.module')
    domain = [('state', '=', 'installed'), ('name', 'in', loadable)]
    dependency_map = {}
    for module in Modules.search_read(domain, ['name', 'dependencies_id']):
        dep_ids = module.get('dependencies_id')
        dep_names = []
        if dep_ids:
            Dependencies = req.session.model('ir.module.module.dependency')
            dep_names = [d['name'] for d in Dependencies.read(dep_ids, ['name'])]
        dependency_map[module['name']] = dep_names
    return module_topological_sort(dependency_map)
def module_installed_bypass_session(dbname):
    """Like :func:`module_installed`, but reads straight from the registry
    instead of going through an authenticated session.

    Returns the installed, loadable module names in dependency order.  On
    any failure (e.g. no registry exists for *dbname*) it falls back to
    whatever was collected so far -- callers treat this as best-effort.
    """
    loadable = openerpweb.addons_manifest.keys()
    modules = {}
    try:
        registry = openerp.modules.registry.RegistryManager.get(dbname)
        with registry.cursor() as cr:
            m = registry.get('ir.module.module')
            # TODO The following code should move to ir.module.module.list_installed_modules()
            # Fix: the domain was previously built here and then duplicated
            # inline in the search() call, leaving this variable unused.
            domain = [('state', '=', 'installed'), ('name', 'in', loadable)]
            ids = m.search(cr, 1, domain)
            for module in m.read(cr, 1, ids, ['name', 'dependencies_id']):
                modules[module['name']] = []
                deps = module.get('dependencies_id')
                if deps:
                    deps_read = registry.get('ir.module.module.dependency').read(
                        cr, 1, deps, ['name'])
                    modules[module['name']] = [i['name'] for i in deps_read]
    except Exception:
        # Deliberately best-effort: an unreachable or uninitialized database
        # simply yields an empty (or partial) module list.
        pass
    return module_topological_sort(modules)
def module_boot(req, db=None):
    """Return the modules the web client must load: server-wide modules
    first, then the database's installed modules (without duplicates)."""
    server_wide = openerp.conf.server_wide_modules or ['web']
    serverside = [m for m in server_wide if m in openerpweb.addons_manifest]

    dbside = []
    monodb = db or db_monodb(req)
    if monodb:
        dbside = [m for m in module_installed_bypass_session(monodb)
                  if m not in serverside]
    return serverside + dbside
def concat_xml(file_list):
    """Concatenate xml files

    The direct children of every file's root element are appended under a
    single new root named after the first file's root tag.

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
            checksum.update(contents)
            fp.seek(0)
            xml = ElementTree.parse(fp).getroot()

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        # Fix: iterate list(xml) instead of the deprecated getchildren(),
        # which was removed in Python 3.9; both yield the direct children.
        for child in list(xml):
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
def concat_files(file_list, reader=None, intersperse=""):
    """ Concatenates contents of all provided files

    :param list(str) file_list: list of files to check
    :param function reader: reading procedure for each file
    :param str intersperse: string to intersperse between file contents
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    if reader is None:
        # Default reader: decode as UTF-8 (skipping any BOM), re-encode.
        def reader(fname):
            import codecs
            with codecs.open(fname, 'rb', "utf-8-sig") as fp:
                return fp.read().encode("utf-8")

    pieces = []
    for fname in file_list:
        piece = reader(fname)
        checksum.update(piece)
        pieces.append(piece)
    return intersperse.join(pieces), checksum.hexdigest()
# Cache of minified JS keyed by the checksum of the unminified sources.
concat_js_cache = {}

def concat_js(file_list):
    """Concatenate and minify the given JS files, memoizing the expensive
    minification step on the content checksum."""
    content, checksum = concat_files(file_list, intersperse=';')
    minified = concat_js_cache.get(checksum)
    if minified is None:
        minified = rjsmin(content)
        concat_js_cache[checksum] = minified
    return minified, checksum
def fs2web(path):
    """Convert a filesystem path into a forward-slash web path."""
    return path.replace(os.path.sep, '/')
def manifest_glob(req, extension, addons=None, db=None):
    """Expand the manifest glob patterns of the given addons for one asset
    *extension* ('js', 'css' or 'qweb').

    Returns a list of (filesystem_path, web_path) tuples.
    """
    if addons is None:
        addons = module_boot(req, db=db)
    else:
        addons = addons.split(',')

    matches = []
    for addon in addons:
        manifest = openerpweb.addons_manifest.get(addon)
        if not manifest:
            continue
        # Normalize so the addons path never ends with a separator.
        addons_path = os.path.join(manifest['addons_path'], '')[:-1]
        for pattern in manifest.get(extension, []):
            full_pattern = os.path.normpath(os.path.join(addons_path, addon, pattern))
            for path in glob.glob(full_pattern):
                matches.append((path, fs2web(path[len(addons_path):])))
    return matches
def manifest_list(req, extension, mods=None, db=None):
    """ list ressources to load specifying either:
    mods: a comma separated string listing modules
    db: a database name (return all installed modules in that database)
    """
    if not req.debug:
        # Production mode: one bundled URL instead of individual files.
        path = '/web/webclient/' + extension
        if mods is not None:
            path += '?' + urllib.urlencode({'mods': mods})
        elif db:
            path += '?' + urllib.urlencode({'db': db})
        return [path]
    return [web_path for _fs_path, web_path in
            manifest_glob(req, extension, addons=mods, db=db)]
def get_last_modified(files):
    """ Returns the modification time of the most recently modified
    file provided

    :param list(str) files: names of files to check
    :return: most recent modification time amongst the fileset, or the
             Unix epoch when no file is given
    :rtype: datetime.datetime
    """
    mtimes = [datetime.datetime.fromtimestamp(os.path.getmtime(f))
              for f in list(files)]
    if mtimes:
        return max(mtimes)
    return datetime.datetime(1970, 1, 1)
def make_conditional(req, response, last_modified=None, etag=None):
    """ Makes the provided response conditional based upon the request,
    and mandates revalidation from clients

    Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
    setting ``last_modified`` and ``etag`` correctly on the response object

    :param req: OpenERP request
    :type req: web.common.http.WebRequest
    :param response: Werkzeug response
    :type response: werkzeug.wrappers.Response
    :param datetime.datetime last_modified: last modification date of the response content
    :param str etag: some sort of checksum of the content (deep etag)
    :return: the response object provided
    :rtype: werkzeug.wrappers.Response
    """
    # Force clients to revalidate their cached copy on every use.
    response.cache_control.must_revalidate = True
    response.cache_control.max_age = 0
    if last_modified:
        response.last_modified = last_modified
    if etag:
        response.set_etag(etag)
    return response.make_conditional(req.httprequest)
def login_and_redirect(req, db, login, key, redirect_url='/'):
    """Authenticate the session and redirect to *redirect_url*."""
    wsgienv = req.httprequest.environ
    env = {
        'base_location': req.httprequest.url_root.rstrip('/'),
        'HTTP_HOST': wsgienv['HTTP_HOST'],
        'REMOTE_ADDR': wsgienv['REMOTE_ADDR'],
    }
    req.session.authenticate(db, login, key, env)
    return set_cookie_and_redirect(req, redirect_url)
def set_cookie_and_redirect(req, redirect_url):
    """Redirect while persisting the session id in a cookie."""
    session_cookie = urllib2.quote(simplejson.dumps(req.session_id))
    response = werkzeug.utils.redirect(redirect_url, 303)
    # Keep the Location header exactly as given (no absolutization).
    response.autocorrect_location_header = False
    response.set_cookie('instance0|session_id', session_cookie)
    return response
def load_actions_from_ir_values(req, key, key2, models, meta):
    """Fetch the actions bound through ir.values and normalize each one."""
    Values = req.session.model('ir.values')
    raw_actions = Values.get(key, key2, models, meta, req.context)
    return [(action_id, name, clean_action(req, action))
            for action_id, name, action in raw_actions]
def clean_action(req, action):
    """Normalize an action descriptor before sending it to the client."""
    action.setdefault('flags', {})
    # Actions without a type default to closing the current window.
    if action.setdefault('type', 'ir.actions.act_window_close') == 'ir.actions.act_window':
        return fix_view_modes(action)
    return action
# NOTE: generate_views and fix_view_modes should probably move into the JS ActionManager.
def generate_views(action):
    """
    While the server generates a sequence called "views" computing dependencies
    between a bunch of stuff for views coming directly from the database
    (the ``ir.actions.act_window model``), it's also possible for e.g. buttons
    to return custom view dictionaries generated on the fly.

    In that case, there is no ``views`` key available on the action.

    Since the web client relies on ``action['views']``, generate it here from
    ``view_mode`` and ``view_id``.

    Currently handles two different cases:

    * no view_id, multiple view_mode
    * single view_id, single view_mode

    :param dict action: action descriptor dictionary to generate a views key for
    """
    view_id = action.get('view_id') or False
    if isinstance(view_id, (list, tuple)):
        # Many2one values arrive as (id, name) pairs: keep only the id.
        view_id = view_id[0]

    # providing at least one view mode is a requirement, not an option
    view_modes = action['view_mode'].split(',')

    if len(view_modes) > 1:
        if view_id:
            raise ValueError('Non-db action dictionaries should provide '
                             'either multiple view modes or a single view '
                             'mode and an optional view id.\n\n Got view '
                             'modes %r and view id %r for action %r' % (
                                 view_modes, view_id, action))
        action['views'] = [(False, mode) for mode in view_modes]
    else:
        action['views'] = [(view_id, view_modes[0])]
def fix_view_modes(action):
    """ For historical reasons, OpenERP has weird dealings in relation to
    view_mode and the view_type attribute (on window actions):

    * one of the view modes is ``tree``, which stands for both list views
      and tree views
    * the choice is made by checking ``view_type``, which is either
      ``form`` for a list view or ``tree`` for an actual tree view

    This methods simply folds the view_type into view_mode by adding a
    new view mode ``list`` which is the result of the ``tree`` view_mode
    in conjunction with the ``form`` view_type.

    TODO: this should go into the doc, some kind of "peculiarities" section

    :param dict action: an action descriptor
    :returns: nothing, the action is modified in place
    """
    if not action.get('views'):
        generate_views(action)

    # A non-form view_type marks a genuine tree view: nothing to fold.
    if action.pop('view_type', 'form') != 'form':
        return action

    def fold(mode):
        return 'list' if mode == 'tree' else mode

    if 'view_mode' in action:
        action['view_mode'] = ','.join(
            fold(mode) for mode in action['view_mode'].split(','))
    action['views'] = [
        [view_id, fold(mode)]
        for view_id, mode in action['views']
    ]

    return action
def _local_web_translations(trans_file):
    """Extract the openerp-web messages of a .po file as id/string dicts.

    Returns None when the file cannot be read or parsed.
    """
    try:
        with open(trans_file) as t_file:
            po = babel.messages.pofile.read_po(t_file)
    except Exception:
        return
    return [{'id': entry.id, 'string': entry.string}
            for entry in po
            if entry.id and entry.string and "openerp-web" in entry.auto_comments]
def xml2json_from_elementtree(el, preserve_whitespaces=False):
    """ xml2json-direct
    Simple and straightforward XML-to-JSON converter in Python
    New BSD Licensed
    http://code.google.com/p/xml2json-direct/
    """
    node = {}
    if el.tag.startswith("{"):
        # Qualified tag: split "{namespace}name" into its two parts.
        namespace, local_name = el.tag.rsplit("}", 1)
        node["tag"] = local_name
        node["namespace"] = namespace[1:]
    else:
        node["tag"] = el.tag
    node["attrs"] = dict(el.items())

    children = []
    if el.text and (preserve_whitespaces or el.text.strip() != ''):
        children.append(el.text)
    for child in el:
        children.append(xml2json_from_elementtree(child, preserve_whitespaces))
        # Text that follows a child element belongs to the parent's children.
        if child.tail and (preserve_whitespaces or child.tail.strip() != ''):
            children.append(child.tail)
    node["children"] = children
    return node
def content_disposition(filename, req):
    """Build a Content-Disposition header value for *filename*, special-
    casing browsers with known filename-encoding quirks."""
    filename = filename.encode('utf8')
    escaped = urllib2.quote(filename)
    agent = req.httprequest.user_agent
    major_version = int((agent.version or '0').split('.')[0])

    if agent.browser == 'msie' and major_version < 9:
        # Old IE: plain percent-encoded filename.
        return "attachment; filename=%s" % escaped
    if agent.browser == 'safari':
        # Safari: raw (unescaped) filename bytes.
        return "attachment; filename=%s" % filename
    # Everyone else gets the RFC 5987 extended form.
    return "attachment; filename*=UTF-8''%s" % escaped
#----------------------------------------------------------
# OpenERP Web web Controllers
#----------------------------------------------------------
html_template = """<!DOCTYPE html>
<html style="height: 100%%">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>OpenERP</title>
<link rel="shortcut icon" href="/web/static/src/img/favicon.ico" type="image/x-icon"/>
<link rel="stylesheet" href="/web/static/src/css/full.css" />
%(css)s
%(js)s
<script type="text/javascript">
$(function() {
var s = new openerp.init(%(modules)s);
%(init)s
});
</script>
</head>
<body>
<!--[if lte IE 8]>
<script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
<script>CFInstall.check({mode: "overlay"});</script>
<![endif]-->
</body>
</html>
"""
class Home(openerpweb.Controller):
    """Root controller: serves the web client page and a login helper."""
    _cp_path = '/'

    @openerpweb.httprequest
    def index(self, req, s_action=None, db=None, **kw):
        """Render the single-page web client for the selected database."""
        db, redir = db_monodb_redirect(req)
        if redir:
            return redirect_with_hash(req, redir)

        script_tags = "\n        ".join(
            '<script type="text/javascript" src="%s"></script>' % src
            for src in manifest_list(req, 'js', db=db))
        link_tags = "\n        ".join(
            '<link rel="stylesheet" href="%s">' % href
            for href in manifest_list(req, 'css', db=db))

        return html_template % {
            'js': script_tags,
            'css': link_tags,
            'modules': simplejson.dumps(module_boot(req, db=db)),
            'init': 'var wc = new s.web.WebClient();wc.appendTo($(document.body));'
        }

    @openerpweb.httprequest
    def login(self, req, db, login, key):
        """Authenticate and redirect to the web client root."""
        return login_and_redirect(req, db, login, key)
class WebClient(openerpweb.Controller):
    """Endpoints serving the web client's static assets (JS/CSS/QWeb
    bundles) and its translations."""
    _cp_path = "/web/webclient"

    @openerpweb.jsonrequest
    def csslist(self, req, mods=None):
        """Return the list of CSS resources for the given modules."""
        return manifest_list(req, 'css', mods=mods)

    @openerpweb.jsonrequest
    def jslist(self, req, mods=None):
        """Return the list of JS resources for the given modules."""
        return manifest_list(req, 'js', mods=mods)

    @openerpweb.jsonrequest
    def qweblist(self, req, mods=None):
        """Return the list of QWeb template resources for the given modules."""
        return manifest_list(req, 'qweb', mods=mods)

    @openerpweb.httprequest
    def css(self, req, mods=None, db=None):
        """Serve all module CSS as one concatenated, cacheable bundle.

        Relative @import and url() references are rewritten to absolute
        web paths so they keep working after concatenation.
        """
        files = list(manifest_glob(req, 'css', addons=mods, db=db))
        last_modified = get_last_modified(f[0] for f in files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        # filesystem path -> web path, needed to absolutify relative uris
        file_map = dict(files)

        # Match relative (non-absolute, non-http, non-data) references only.
        rx_import = re.compile(r"""@import\s+('|")(?!'|"|/|https?://)""", re.U)
        rx_url = re.compile(r"""url\s*\(\s*('|"|)(?!'|"|/|https?://|data:)""", re.U)

        def reader(f):
            """read the a css file and absolutify all relative uris"""
            with open(f, 'rb') as fp:
                data = fp.read().decode('utf-8')

            path = file_map[f]
            web_dir = os.path.dirname(path)

            data = re.sub(
                rx_import,
                r"""@import \1%s/""" % (web_dir,),
                data,
            )

            data = re.sub(
                rx_url,
                r"""url(\1%s/""" % (web_dir,),
                data,
            )
            return data.encode('utf-8')

        content, checksum = concat_files((f[0] for f in files), reader)

        # move up all @import and @charset rules to the top
        matches = []
        def push(matchobj):
            matches.append(matchobj.group(0))
            return ''
        content = re.sub(re.compile("(@charset.+;$)", re.M), push, content)
        content = re.sub(re.compile("(@import.+;$)", re.M), push, content)
        matches.append(content)
        content = '\n'.join(matches)

        return make_conditional(
            req, req.make_response(content, [('Content-Type', 'text/css')]),
            last_modified, checksum)

    @openerpweb.httprequest
    def js(self, req, mods=None, db=None):
        """Serve all module JS as one minified, cacheable bundle."""
        files = [f[0] for f in manifest_glob(req, 'js', addons=mods, db=db)]
        last_modified = get_last_modified(files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        content, checksum = concat_js(files)

        return make_conditional(
            req, req.make_response(content, [('Content-Type', 'application/javascript')]),
            last_modified, checksum)

    @openerpweb.httprequest
    def qweb(self, req, mods=None, db=None):
        """Serve all module QWeb templates as one XML document."""
        files = [f[0] for f in manifest_glob(req, 'qweb', addons=mods, db=db)]
        last_modified = get_last_modified(files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        content, checksum = concat_xml(files)

        return make_conditional(
            req, req.make_response(content, [('Content-Type', 'text/xml')]),
            last_modified, checksum)

    @openerpweb.jsonrequest
    def bootstrap_translations(self, req, mods):
        """ Load local translations from *.po files, as a temporary solution
        until we have established a valid session. This is meant only
        for translating the login page and db management chrome, using
        the browser's language. """
        # For performance reasons we only load a single translation, so for
        # sub-languages (that should only be partially translated) we load the
        # main language PO instead - that should be enough for the login screen.
        if req.lang in ('zh_CN', 'zh_TW', 'zh_HK'):
            lang = req.lang
        else:
            lang = req.lang.split('_')[0]

        translations_per_module = {}
        for addon_name in mods:
            # Only addons explicitly flagged 'bootstrap' ship pre-session
            # translations.
            if openerpweb.addons_manifest[addon_name].get('bootstrap'):
                addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
                f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po")
                if not os.path.exists(f_name):
                    continue
                translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)}

        return {"modules": translations_per_module,
                "lang_parameters": None}

    @openerpweb.jsonrequest
    def translations(self, req, mods, lang):
        """Return the openerp-web translations and the language parameters
        for *lang*, grouped per module."""
        res_lang = req.session.model('res.lang')
        ids = res_lang.search([("code", "=", lang)])
        lang_params = None
        if ids:
            lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
                                                 "grouping", "decimal_point", "thousands_sep"])

        # Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
        # done server-side when the language is loaded, so we only need to load the user's lang.
        ir_translation = req.session.model('ir.translation')
        translations_per_module = {}
        # order='module' so itertools.groupby below sees each module's
        # messages contiguously.
        messages = ir_translation.search_read([('module','in',mods),('lang','=',lang),
                                               ('comments','like','openerp-web'),('value','!=',False),
                                               ('value','!=','')],
                                              ['module','src','value','lang'], order='module')
        for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')):
            translations_per_module.setdefault(mod,{'messages':[]})
            translations_per_module[mod]['messages'].extend({'id': m['src'],
                                                             'string': m['value']} \
                for m in msg_group)
        return {"modules": translations_per_module,
                "lang_parameters": lang_params}

    @openerpweb.jsonrequest
    def version_info(self, req):
        """Return the server's RPC version descriptor."""
        return openerp.service.web_services.RPC_VERSION_1
class Proxy(openerpweb.Controller):
    """Replays HTTP requests against our own WSGI app over JSON-RPC."""
    _cp_path = '/web/proxy'

    @openerpweb.jsonrequest
    def load(self, req, path):
        """ Proxies an HTTP request through a JSON request.

        It is strongly recommended to not request binary files through this,
        as the result will be a binary data blob as well.

        :param req: OpenERP request
        :param path: actual request path
        :return: file content
        """
        from werkzeug.test import Client
        from werkzeug.wrappers import BaseResponse

        wsgi_client = Client(req.httprequest.app, BaseResponse)
        return wsgi_client.get(path).data
class Database(openerpweb.Controller):
    """Database-management endpoints: list, create, duplicate, drop,
    backup, restore, and master-password change."""
    _cp_path = "/web/database"

    @openerpweb.jsonrequest
    def get_list(self, req):
        """Return the database names this client is allowed to see."""
        # TODO change js to avoid calling this method if in monodb mode
        try:
            return db_list(req)
        except xmlrpclib.Fault:
            # Listing is forbidden by the server: fall back to the single
            # known database, if any; otherwise re-raise the fault.
            monodb = db_monodb(req)
            if monodb:
                return [monodb]
            raise

    @openerpweb.jsonrequest
    def create(self, req, fields):
        """Create a new database from a form-style list of {name, value}."""
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        return req.session.proxy("db").create_database(
            params['super_admin_pwd'],
            params['db_name'],
            bool(params.get('demo_data')),
            params['db_lang'],
            params['create_admin_pwd'])

    @openerpweb.jsonrequest
    def duplicate(self, req, fields):
        """Duplicate an existing database under a new name."""
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        duplicate_attrs = (
            params['super_admin_pwd'],
            params['db_original_name'],
            params['db_name'],
        )
        return req.session.proxy("db").duplicate_database(*duplicate_attrs)

    @openerpweb.jsonrequest
    def drop(self, req, fields):
        """Drop a database; True on success, an error dict otherwise."""
        password, db = operator.itemgetter(
            'drop_pwd', 'drop_db')(
                dict(map(operator.itemgetter('name', 'value'), fields)))

        try:
            if req.session.proxy("db").drop(password, db):
                return True
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Drop Database'}
        # Fall-through: the RPC returned falsy or raised a non-access fault.
        return {'error': _('Could not drop database !'), 'title': _('Drop Database')}

    @openerpweb.httprequest
    def backup(self, req, backup_db, backup_pwd, token):
        """Stream a decoded dump of *backup_db* as a file download."""
        try:
            db_dump = base64.b64decode(
                req.session.proxy("db").dump(backup_pwd, backup_db))
            filename = "%(db)s_%(timestamp)s.dump" % {
                'db': backup_db,
                'timestamp': datetime.datetime.utcnow().strftime(
                    "%Y-%m-%d_%H-%M-%SZ")
            }
            return req.make_response(db_dump,
                [('Content-Type', 'application/octet-stream; charset=binary'),
                 ('Content-Disposition', content_disposition(filename, req))],
                {'fileToken': token}
            )
        except xmlrpclib.Fault, e:
            # Error payload shaped for the client-side file-upload handler.
            return simplejson.dumps([[],[{'error': e.faultCode, 'title': _('Backup Database')}]])

    @openerpweb.httprequest
    def restore(self, req, db_file, restore_pwd, new_db):
        """Restore an uploaded dump file into a new database."""
        try:
            data = base64.b64encode(db_file.read())
            req.session.proxy("db").restore(restore_pwd, new_db, data)
            return ''
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                raise Exception("AccessDenied")

    @openerpweb.jsonrequest
    def change_password(self, req, fields):
        """Change the master (super-admin) password."""
        old_password, new_password = operator.itemgetter(
            'old_pwd', 'new_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        try:
            return req.session.proxy("db").change_admin_password(old_password, new_password)
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': _('Change Password')}
        return {'error': _('Error, password not changed !'), 'title': _('Change Password')}
class Session(openerpweb.Controller):
    """Session lifecycle endpoints: authentication, password change,
    language listing, and small server-side action storage."""
    _cp_path = "/web/session"

    def session_info(self, req):
        """Describe the current session (id, uid, context, db, login)."""
        req.session.ensure_valid()
        return {
            "session_id": req.session_id,
            "uid": req.session._uid,
            "user_context": req.session.get_context() if req.session._uid else {},
            "db": req.session._db,
            "username": req.session._login,
        }

    @openerpweb.jsonrequest
    def get_session_info(self, req):
        """JSON wrapper around :meth:`session_info`."""
        return self.session_info(req)

    @openerpweb.jsonrequest
    def authenticate(self, req, db, login, password, base_location=None):
        """Authenticate against *db* and return the new session info."""
        wsgienv = req.httprequest.environ
        env = dict(
            base_location=base_location,
            HTTP_HOST=wsgienv['HTTP_HOST'],
            REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
        )
        req.session.authenticate(db, login, password, env)

        return self.session_info(req)

    @openerpweb.jsonrequest
    def change_password (self,req,fields):
        """Change the logged-in user's password.

        Returns {'new_password': ...} on success or an error dict.
        """
        old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
            return {'error':_('You cannot leave any password empty.'),'title': _('Change Password')}
        if new_password != confirm_password:
            return {'error': _('The new password and its confirmation must be identical.'),'title': _('Change Password')}
        try:
            if req.session.model('res.users').change_password(
                    old_password, new_password):
                return {'new_password':new_password}
        except Exception:
            # The ORM raises when the old password does not match.
            return {'error': _('The old password you provided is incorrect, your password was not changed.'), 'title': _('Change Password')}
        return {'error': _('Error, password not changed !'), 'title': _('Change Password')}

    @openerpweb.jsonrequest
    def sc_list(self, req):
        """Return the user's menu shortcuts."""
        return req.session.model('ir.ui.view_sc').get_sc(
            req.session._uid, "ir.ui.menu", req.context)

    @openerpweb.jsonrequest
    def get_lang_list(self, req):
        """List the languages installable on a new database."""
        try:
            return req.session.proxy("db").list_lang() or []
        except Exception, e:
            return {"error": e, "title": _("Languages")}

    @openerpweb.jsonrequest
    def modules(self, req):
        # return all installed modules. Web client is smart enough to not load a module twice
        return module_installed(req)

    @openerpweb.jsonrequest
    def save_session_action(self, req, the_action):
        """
        This method store an action object in the session object and returns an integer
        identifying that action. The method get_session_action() can be used to get
        back the action.

        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            saved_actions = {"next":1, "actions":{}}
            req.httpsession['saved_actions'] = saved_actions
        # we don't allow more than 10 stored actions
        if len(saved_actions["actions"]) >= 10:
            # Drop the oldest (smallest) key to make room.
            del saved_actions["actions"][min(saved_actions["actions"])]
        key = saved_actions["next"]
        saved_actions["actions"][key] = the_action
        saved_actions["next"] = key + 1
        return key

    @openerpweb.jsonrequest
    def get_session_action(self, req, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).

        :param key: The key given by save_session_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            return None
        return saved_actions["actions"].get(key)

    @openerpweb.jsonrequest
    def check(self, req):
        """Raise if the session is no longer valid."""
        req.session.assert_valid()
        return None

    @openerpweb.jsonrequest
    def destroy(self, req):
        """Mark the session for destruction at the end of the request."""
        req.session._suicide = True
class Menu(openerpweb.Controller):
_cp_path = "/web/menu"
@openerpweb.jsonrequest
def get_user_roots(self, req):
""" Return all root menu ids visible for the session user.
:param req: A request object, with an OpenERP session attribute
:type req: < session -> OpenERPSession >
:return: the root menu ids
:rtype: list(int)
"""
s = req.session
Menus = s.model('ir.ui.menu')
# If a menu action is defined use its domain to get the root menu items
user_menu_id = s.model('res.users').read([s._uid], ['menu_id'],
req.context)[0]['menu_id']
menu_domain = [('parent_id', '=', False)]
if user_menu_id:
domain_string = s.model('ir.actions.act_window').read(
[user_menu_id[0]], ['domain'],req.context)[0]['domain']
if domain_string:
menu_domain = ast.literal_eval(domain_string)
return Menus.search(menu_domain, 0, False, False, req.context)
    @openerpweb.jsonrequest
    def load(self, req):
        """ Loads all menu items (all applications and their sub-menus).

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        Menus = req.session.model('ir.ui.menu')

        fields = ['name', 'sequence', 'parent_id', 'action']
        menu_root_ids = self.get_user_roots(req)
        menu_roots = Menus.read(menu_root_ids, fields, req.context) if menu_root_ids else []
        # Synthetic root node wrapping all visible applications.
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots,
            'all_menu_ids': menu_root_ids,
        }
        if not menu_roots:
            return menu_root

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menu_ids = Menus.search([('id', 'child_of', menu_root_ids)], 0, False, False, req.context)
        menu_items = Menus.read(menu_ids, fields, req.context)
        # adds roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots)
        menu_root['all_menu_ids'] = menu_ids # includes menu_root_ids!

        # make a tree using parent_id
        menu_items_map = dict(
            (menu_item["id"], menu_item) for menu_item in menu_items)
        for menu_item in menu_items:
            if menu_item['parent_id']:
                # parent_id is a many2one (id, name) pair; keep the id.
                parent = menu_item['parent_id'][0]
            else:
                parent = False
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(
                key=operator.itemgetter('sequence'))

        return menu_root
@openerpweb.jsonrequest
def load_needaction(self, req, menu_ids):
""" Loads needaction counters for specific menu ids.
:return: needaction data
:rtype: dict(menu_id: {'needaction_enabled': boolean, 'needaction_counter': int})
"""
return req.session.model('ir.ui.menu').get_needaction_data(menu_ids, req.context)
@openerpweb.jsonrequest
def action(self, req, menu_id):
# still used by web_shortcut
actions = load_actions_from_ir_values(req,'action', 'tree_but_open',
[('ir.ui.menu', menu_id)], False)
return {"action": actions}
class DataSet(openerpweb.Controller):
    """Generic RPC endpoint: search/read, arbitrary model method calls,
    workflow signals and record resequencing."""
    _cp_path = "/web/dataset"
    @openerpweb.jsonrequest
    def search_read(self, req, model, fields=False, offset=0, limit=False, domain=None, sort=None):
        return self.do_search_read(req, model, fields, offset, limit, domain, sort)
    def do_search_read(self, req, model, fields=False, offset=0, limit=False, domain=None
                       , sort=None):
        """ Performs a search() followed by a read() (if needed) using the
        provided search criteria
        :param req: a JSON-RPC request object
        :type req: openerpweb.JsonRequest
        :param str model: the name of the model to search on
        :param fields: a list of the fields to return in the result records
        :type fields: [str]
        :param int offset: from which index should the results start being returned
        :param int limit: the maximum number of records to return
        :param list domain: the search domain for the query
        :param list sort: sorting directives
        :returns: A structure (dict) with two keys: ids (all the ids matching
                  the (domain, context) pair) and records (paginated records
                  matching fields selection set)
        :rtype: list
        """
        Model = req.session.model(model)
        ids = Model.search(domain, offset or 0, limit or False, sort or False,
                           req.context)
        if limit and len(ids) == limit:
            # The page is full: run a count to know the real total.
            length = Model.search_count(domain, req.context)
        else:
            length = len(ids) + (offset or 0)
        if fields and fields == ['id']:
            # shortcut read if we only want the ids
            return {
                'length': length,
                'records': [{'id': id} for id in ids]
            }
        records = Model.read(ids, fields or False, req.context)
        # read() does not guarantee order: restore the search() ordering.
        records.sort(key=lambda obj: ids.index(obj['id']))
        return {
            'length': length,
            'records': records
        }
    @openerpweb.jsonrequest
    def load(self, req, model, id, fields):
        """Reads one record fully; returns an empty value if it is missing."""
        m = req.session.model(model)
        value = {}
        r = m.read([id], False, req.context)
        if r:
            value = r[0]
        return {'value': value}
    def call_common(self, req, model, method, args, domain_id=None, context_id=None):
        return self._call_kw(req, model, method, args, {})
    def _call_kw(self, req, model, method, args, kwargs):
        # Temporary implements future display_name special field for model#read()
        if method == 'read' and kwargs.get('context', {}).get('future_display_name'):
            if 'display_name' in args[1]:
                # Emulate display_name via name_get(), then strip the field
                # from the requested list before the real read().
                names = dict(req.session.model(model).name_get(args[0], **kwargs))
                args[1].remove('display_name')
                records = req.session.model(model).read(*args, **kwargs)
                for record in records:
                    record['display_name'] = \
                        names.get(record['id']) or "%s#%d" % (model, (record['id']))
                return records
        return getattr(req.session.model(model), method)(*args, **kwargs)
    @openerpweb.jsonrequest
    def call(self, req, model, method, args, domain_id=None, context_id=None):
        return self._call_kw(req, model, method, args, {})
    @openerpweb.jsonrequest
    def call_kw(self, req, model, method, args, kwargs):
        return self._call_kw(req, model, method, args, kwargs)
    @openerpweb.jsonrequest
    def call_button(self, req, model, method, args, domain_id=None, context_id=None):
        """Invokes a button method; sanitizes any action dict it returns."""
        action = self._call_kw(req, model, method, args, {})
        if isinstance(action, dict) and action.get('type') != '':
            return clean_action(req, action)
        return False
    @openerpweb.jsonrequest
    def exec_workflow(self, req, model, id, signal):
        return req.session.exec_workflow(model, id, signal)
    @openerpweb.jsonrequest
    def resequence(self, req, model, ids, field='sequence', offset=0):
        """ Re-sequences a number of records in the model, by their ids
        The re-sequencing starts at the first model of ``ids``, the sequence
        number is incremented by one after each record and starts at ``offset``
        :param ids: identifiers of the records to resequence, in the new sequence order
        :type ids: list(id)
        :param str field: field used for sequence specification, defaults to
                          "sequence"
        :param int offset: sequence number for first record in ``ids``, allows
                           starting the resequencing from an arbitrary number,
                           defaults to ``0``
        """
        m = req.session.model(model)
        if not m.fields_get([field]):
            # Model has no such field: nothing to resequence.
            return False
        # python 2.6 has no start parameter
        for i, id in enumerate(ids):
            m.write(id, { field: i + offset })
        return True
class View(openerpweb.Controller):
    """Management of per-user view customizations (ir.ui.view.custom)."""
    _cp_path = "/web/view"
    @openerpweb.jsonrequest
    def add_custom(self, req, view_id, arch):
        """Stores a customized arch for ``view_id`` for the current user."""
        CustomView = req.session.model('ir.ui.view.custom')
        CustomView.create({
            'user_id': req.session._uid,
            'ref_id': view_id,
            'arch': arch
        }, req.context)
        return {'result': True}
    @openerpweb.jsonrequest
    def undo_custom(self, req, view_id, reset=False):
        """Removes the latest (or, with ``reset``, all) customizations of the
        view for the current user. Returns whether anything was removed."""
        CustomView = req.session.model('ir.ui.view.custom')
        vcustom = CustomView.search([('user_id', '=', req.session._uid), ('ref_id' ,'=', view_id)],
                                    0, False, False, req.context)
        if vcustom:
            if reset:
                CustomView.unlink(vcustom, req.context)
            else:
                # Only drop the most recent customization.
                CustomView.unlink([vcustom[0]], req.context)
            return {'result': True}
        return {'result': False}
class TreeView(View):
    """Controller resolving the action bound to a tree-view record."""
    _cp_path = "/web/treeview"

    @openerpweb.jsonrequest
    def action(self, req, model, id):
        """Returns the 'tree_but_open' action registered for (model, id)."""
        key_pairs = [(model, id)]
        return load_actions_from_ir_values(
            req, 'action', 'tree_but_open', key_pairs, False)
class Binary(openerpweb.Controller):
    """Serves and receives binary payloads: images with ETag caching,
    downloads of binary fields, file uploads and the company logo."""
    _cp_path = "/web/binary"
    @openerpweb.httprequest
    def image(self, req, model, id, field, **kw):
        """ Serves an image stored in a binary field as a PNG response.

        Supports ETag-based caching (the tag is the md5 of the record's
        ``__last_update``, or of the session id when no record id is given)
        and optional server-side resizing via ``?resize=w,h`` (capped at
        500x500). Falls back to a placeholder image on any read error.
        """
        last_update = '__last_update'
        Model = req.session.model(model)
        headers = [('Content-Type', 'image/png')]
        etag = req.httprequest.headers.get('If-None-Match')
        hashed_session = hashlib.md5(req.session_id).hexdigest()
        id = None if not id else simplejson.loads(id)
        if type(id) is list:
            id = id[0] # m2o
        if etag:
            if not id and hashed_session == etag:
                return werkzeug.wrappers.Response(status=304)
            else:
                date = Model.read([id], [last_update], req.context)[0].get(last_update)
                if hashlib.md5(date).hexdigest() == etag:
                    return werkzeug.wrappers.Response(status=304)
        retag = hashed_session
        try:
            if not id:
                # No record: serve the field's default value.
                res = Model.default_get([field], req.context).get(field)
                image_base64 = res
            else:
                res = Model.read([id], [last_update, field], req.context)[0]
                retag = hashlib.md5(res.get(last_update)).hexdigest()
                image_base64 = res.get(field)
            if kw.get('resize'):
                resize = kw.get('resize').split(',')
                if len(resize) == 2 and int(resize[0]) and int(resize[1]):
                    width = int(resize[0])
                    height = int(resize[1])
                    # resize maximum 500*500
                    if width > 500: width = 500
                    if height > 500: height = 500
                    image_base64 = openerp.tools.image_resize_image(base64_source=image_base64, size=(width, height), encoding='base64', filetype='PNG')
            image_data = base64.b64decode(image_base64)
        except (TypeError, xmlrpclib.Fault):
            # Empty/absent field or RPC failure: serve the placeholder.
            image_data = self.placeholder(req)
        headers.append(('ETag', retag))
        headers.append(('Content-Length', len(image_data)))
        try:
            ncache = int(kw.get('cache'))
            headers.append(('Cache-Control', 'no-cache' if ncache == 0 else 'max-age=%s' % (ncache)))
        except (TypeError, ValueError):
            # 'cache' parameter absent or not an integer: no cache header.
            pass
        return req.make_response(image_data, headers)
    def placeholder(self, req, image='placeholder.png'):
        """Returns the raw bytes of a static placeholder image shipped with
        the web addon. The file handle is closed deterministically."""
        addons_path = openerpweb.addons_manifest['web']['addons_path']
        with open(os.path.join(addons_path, 'web', 'static', 'src', 'img', image), 'rb') as f:
            return f.read()
    @openerpweb.httprequest
    def saveas(self, req, model, field, id=None, filename_field=None, **kw):
        """ Download link for files stored as binary fields.
        If the ``id`` parameter is omitted, fetches the default value for the
        binary field (via ``default_get``), otherwise fetches the field for
        that precise record.
        :param req: OpenERP request
        :type req: :class:`web.common.http.HttpRequest`
        :param str model: name of the model to fetch the binary from
        :param str field: binary field
        :param str id: id of the record from which to fetch the binary
        :param str filename_field: field holding the file's name, if any
        :returns: :class:`werkzeug.wrappers.Response`
        """
        Model = req.session.model(model)
        fields = [field]
        if filename_field:
            fields.append(filename_field)
        if id:
            res = Model.read([int(id)], fields, req.context)[0]
        else:
            res = Model.default_get(fields, req.context)
        filecontent = base64.b64decode(res.get(field, ''))
        if not filecontent:
            return req.not_found()
        else:
            filename = '%s_%s' % (model.replace('.', '_'), id)
            if filename_field:
                filename = res.get(filename_field, '') or filename
            return req.make_response(filecontent,
                [('Content-Type', 'application/octet-stream'),
                 ('Content-Disposition', content_disposition(filename, req))])
    @openerpweb.httprequest
    def saveas_ajax(self, req, data, token):
        """Same as :meth:`saveas` but driven by a JSON payload, with a
        ``fileToken`` cookie so the client can detect download completion."""
        jdata = simplejson.loads(data)
        model = jdata['model']
        field = jdata['field']
        data = jdata['data']
        id = jdata.get('id', None)
        filename_field = jdata.get('filename_field', None)
        context = jdata.get('context', {})
        Model = req.session.model(model)
        fields = [field]
        if filename_field:
            fields.append(filename_field)
        if data:
            # Content supplied inline by the client.
            res = { field: data }
        elif id:
            res = Model.read([int(id)], fields, context)[0]
        else:
            res = Model.default_get(fields, context)
        filecontent = base64.b64decode(res.get(field, ''))
        if not filecontent:
            raise ValueError(_("No content found for field '%s' on '%s:%s'") %
                (field, model, id))
        else:
            filename = '%s_%s' % (model.replace('.', '_'), id)
            if filename_field:
                filename = res.get(filename_field, '') or filename
            return req.make_response(filecontent,
                headers=[('Content-Type', 'application/octet-stream'),
                        ('Content-Disposition', content_disposition(filename, req))],
                cookies={'fileToken': token})
    @openerpweb.httprequest
    def upload(self, req, callback, ufile):
        """Receives an uploaded file and reports it back to the client by
        triggering a jQuery event in the enclosing window."""
        # TODO: might be useful to have a configuration flag for max-length file uploads
        out = """<script language="javascript" type="text/javascript">
            var win = window.top.window;
            win.jQuery(win).trigger(%s, %s);
        </script>"""
        try:
            data = ufile.read()
            args = [len(data), ufile.filename,
                    ufile.content_type, base64.b64encode(data)]
        except Exception as e:
            args = [False, e.message]
        return out % (simplejson.dumps(callback), simplejson.dumps(args))
    @openerpweb.httprequest
    def upload_attachment(self, req, callback, model, id, ufile):
        """Stores an uploaded file as an ir.attachment linked to a record and
        notifies the client through a jQuery event."""
        Model = req.session.model('ir.attachment')
        out = """<script language="javascript" type="text/javascript">
            var win = window.top.window;
            win.jQuery(win).trigger(%s, %s);
        </script>"""
        try:
            attachment_id = Model.create({
                'name': ufile.filename,
                'datas': base64.encodestring(ufile.read()),
                'datas_fname': ufile.filename,
                'res_model': model,
                'res_id': int(id)
            }, req.context)
            args = {
                'filename': ufile.filename,
                'id':  attachment_id
            }
        except xmlrpclib.Fault as e:
            args = {'error':e.faultCode }
        return out % (simplejson.dumps(callback), simplejson.dumps(args))
    @openerpweb.httprequest
    def company_logo(self, req, dbname=None):
        """Serves the current company's logo, falling back to the bundled
        default/nologo placeholders when unavailable."""
        # TODO add etag, refactor to use /image code for etag
        uid = None
        if req.session._db:
            dbname = req.session._db
            uid = req.session._uid
        elif dbname is None:
            dbname = db_monodb(req)
        if not uid:
            uid = openerp.SUPERUSER_ID
        if not dbname:
            image_data = self.placeholder(req, 'logo.png')
        else:
            try:
                # create an empty registry
                registry = openerp.modules.registry.Registry(dbname)
                with registry.cursor() as cr:
                    cr.execute("""SELECT c.logo_web
                                    FROM res_users u
                               LEFT JOIN res_company c
                                      ON c.id = u.company_id
                                   WHERE u.id = %s
                               """, (uid,))
                    row = cr.fetchone()
                    if row and row[0]:
                        image_data = str(row[0]).decode('base64')
                    else:
                        image_data = self.placeholder(req, 'nologo.png')
            except Exception:
                # Deliberate best-effort: any failure yields the default logo.
                image_data = self.placeholder(req, 'logo.png')
        headers = [
            ('Content-Type', 'image/png'),
            ('Content-Length', len(image_data)),
        ]
        return req.make_response(image_data, headers)
class Action(openerpweb.Controller):
    """Loads and runs ir.actions records for the web client."""
    _cp_path = "/web/action"
    @openerpweb.jsonrequest
    def load(self, req, action_id, do_not_eval=False):
        """Loads an action by numeric id or by 'module.xmlid' external id.

        Returns the cleaned action dict, or False when it cannot be read.
        """
        Actions = req.session.model('ir.actions.actions')
        value = False
        try:
            action_id = int(action_id)
        except ValueError:
            # Not numeric: try to resolve it as an external 'module.xmlid'.
            try:
                module, xmlid = action_id.split('.', 1)
                model, action_id = req.session.model('ir.model.data').get_object_reference(module, xmlid)
                assert model.startswith('ir.actions.')
            except Exception:
                action_id = 0   # force failed read
        base_action = Actions.read([action_id], ['type'], req.context)
        if base_action:
            ctx = {}
            action_type = base_action[0]['type']
            if action_type == 'ir.actions.report.xml':
                # Only fetch binary sizes, not the full report payloads.
                ctx.update({'bin_size': True})
            ctx.update(req.context)
            action = req.session.model(action_type).read([action_id], False, ctx)
            if action:
                value = clean_action(req, action[0])
        return value
    @openerpweb.jsonrequest
    def run(self, req, action_id):
        """Executes a server action; returns its follow-up action or False."""
        return_action = req.session.model('ir.actions.server').run(
            [action_id], req.context)
        if return_action:
            return clean_action(req, return_action)
        else:
            return False
class Export(openerpweb.Controller):
    """Endpoints backing the export dialog: available formats, exportable
    fields (recursively), and saved export lists."""
    _cp_path = "/web/export"
    @openerpweb.jsonrequest
    def formats(self, req):
        """ Returns all valid export formats
        :returns: for each export format, a pair of identifier and printable name
        :rtype: [(str, str)]
        """
        # Export format controllers register themselves under self._cp_path
        # and expose a 'fmt' attribute describing themselves.
        return sorted([
            controller.fmt
            for path, controller in openerpweb.controllers_path.iteritems()
            if path.startswith(self._cp_path)
            if hasattr(controller, 'fmt')
        ], key=operator.itemgetter("label"))
    def fields_get(self, req, model):
        """Returns the full fields_get() description for ``model``."""
        Model = req.session.model(model)
        fields = Model.fields_get(False, req.context)
        return fields
    @openerpweb.jsonrequest
    def get_fields(self, req, model, prefix='', parent_name= '',
                   import_compat=True, parent_field_type=None,
                   exclude=None):
        """Lists the exportable fields of ``model`` for the export dialog.

        In import-compatible mode, readonly and excluded fields are skipped
        and m2o sub-fields are not expanded.
        """
        if import_compat and parent_field_type == "many2one":
            fields = {}
        else:
            fields = self.fields_get(req, model)
        if import_compat:
            fields.pop('id', None)
        else:
            # Expose the database id under the '.id' pseudo-field.
            fields['.id'] = fields.pop('id', {'string': 'ID'})
        fields_sequence = sorted(fields.iteritems(),
            key=lambda field: field[1].get('string', ''))
        records = []
        for field_name, field in fields_sequence:
            if import_compat:
                if exclude and field_name in exclude:
                    continue
                if field.get('readonly'):
                    # If none of the field's states unsets readonly, skip the field
                    if all(dict(attrs).get('readonly', True)
                           for attrs in field.get('states', {}).values()):
                        continue
            if not field.get('exportable', True):
                continue
            id = prefix + (prefix and '/'or '') + field_name
            name = parent_name + (parent_name and '/' or '') + field['string']
            record = {'id': id, 'string': name,
                      'value': id, 'children': False,
                      'field_type': field.get('type'),
                      'required': field.get('required'),
                      'relation_field': field.get('relation_field')}
            records.append(record)
            # Only expand relations up to two levels deep.
            if len(name.split('/')) < 3 and 'relation' in field:
                ref = field.pop('relation')
                record['value'] += '/id'
                record['params'] = {'model': ref, 'prefix': id, 'name': name}
                if not import_compat or field['type'] == 'one2many':
                    # m2m field in import_compat is childless
                    record['children'] = True
        return records
    @openerpweb.jsonrequest
    def namelist(self,req, model, export_id):
        # TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
        export = req.session.model("ir.exports").read([export_id])[0]
        export_fields_list = req.session.model("ir.exports.line").read(
            export['export_fields'])
        fields_data = self.fields_info(
            req, model, map(operator.itemgetter('name'), export_fields_list))
        return [
            {'name': field['name'], 'label': fields_data[field['name']]}
            for field in export_fields_list
        ]
    def fields_info(self, req, model, export_fields):
        """Maps each export path in ``export_fields`` to a printable label,
        fetching sub-model fields in batches for efficiency."""
        info = {}
        fields = self.fields_get(req, model)
        if ".id" in export_fields:
            fields['.id'] = fields.pop('id', {'string': 'ID'})
        # To make fields retrieval more efficient, fetch all sub-fields of a
        # given field at the same time. Because the order in the export list is
        # arbitrary, this requires ordering all sub-fields of a given field
        # together so they can be fetched at the same time
        #
        # Works the following way:
        # * sort the list of fields to export, the default sorting order will
        #   put the field itself (if present, for xmlid) and all of its
        #   sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
        #   all of the subfields on the other. This way, we have the field (for
        #   the xmlid) with length 1, and all of the subfields with the same
        #   base but a length "flag" of 2
        # * if we have a normal field (length 1), just add it to the info
        #   mapping (with its string) as-is
        # * otherwise, recursively call fields_info via graft_subfields.
        #   all graft_subfields does is take the result of fields_info (on the
        #   field's model) and prepend the current base (current field), which
        #   rebuilds the whole sub-tree for the field
        #
        # result: because we're not fetching the fields_get for half the
        # database models, fetching a namelist with a dozen fields (including
        # relational data) falls from ~6s to ~300ms (on the leads model).
        # export lists with no sub-fields (e.g. import_compatible lists with
        # no o2m) are even more efficient (from the same 6s to ~170ms, as
        # there's a single fields_get to execute)
        for (base, length), subfields in itertools.groupby(
                sorted(export_fields),
                lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
            subfields = list(subfields)
            if length == 2:
                # subfields is a seq of $base/*rest, and not loaded yet
                info.update(self.graft_subfields(
                    req, fields[base]['relation'], base, fields[base]['string'],
                    subfields
                ))
            else:
                info[base] = fields[base]['string']
        return info
    def graft_subfields(self, req, model, prefix, prefix_string, fields):
        """Yields (path, label) pairs for sub-fields, prefixed with the
        parent field's path and label."""
        export_fields = [field.split('/', 1)[1] for field in fields]
        return (
            (prefix + '/' + k, prefix_string + '/' + v)
            for k, v in self.fields_info(req, model, export_fields).iteritems())
class ExportFormat(object):
    """Base class for export format controllers (CSV, XLS, ...).

    Subclasses provide ``content_type``, ``filename`` and ``from_data``;
    ``index`` implements the shared download workflow.
    """
    @property
    def content_type(self):
        """ Provides the format's content type """
        raise NotImplementedError()
    def filename(self, base):
        """ Creates a valid filename for the format (with extension) from the
        provided base name (extension-less)
        """
        raise NotImplementedError()
    def from_data(self, fields, rows):
        """ Conversion method from OpenERP's export data to whatever the
        current export class outputs
        :params list fields: a list of fields to export
        :params list rows: a list of records to export
        :returns:
        :rtype: bytes
        """
        raise NotImplementedError()
    @openerpweb.httprequest
    def index(self, req, data, token):
        """Exports the requested records and streams them back as a file
        download, tagging the response with the client's ``fileToken``."""
        model, fields, ids, domain, import_compat = \
            operator.itemgetter('model', 'fields', 'ids', 'domain',
                                'import_compat')(
                simplejson.loads(data))
        Model = req.session.model(model)
        # No explicit ids: export everything matching the domain.
        ids = ids or Model.search(domain, 0, False, False, req.context)
        field_names = map(operator.itemgetter('name'), fields)
        import_data = Model.export_data(ids, field_names, req.context).get('datas',[])
        if import_compat:
            columns_headers = field_names
        else:
            columns_headers = [val['label'].strip() for val in fields]
        return req.make_response(self.from_data(columns_headers, import_data),
            headers=[('Content-Disposition',
                            content_disposition(self.filename(model), req)),
                     ('Content-Type', self.content_type)],
            cookies={'fileToken': token})
class CSVExport(ExportFormat, http.Controller):
    """CSV implementation of the export format interface."""
    _cp_path = '/web/export/csv'
    fmt = {'tag': 'csv', 'label': 'CSV'}
    @property
    def content_type(self):
        return 'text/csv;charset=utf8'
    def filename(self, base):
        return base + '.csv'
    def from_data(self, fields, rows):
        """Serializes headers and rows to a UTF-8 encoded CSV string."""
        fp = StringIO()
        writer = csv.writer(fp, quoting=csv.QUOTE_ALL)
        writer.writerow([name.encode('utf-8') for name in fields])
        for data in rows:
            row = []
            for d in data:
                if isinstance(d, basestring):
                    # Flatten newlines/tabs so each record stays on one line.
                    d = d.replace('\n',' ').replace('\t',' ')
                    try:
                        d = d.encode('utf-8')
                    except UnicodeError:
                        pass
                if d is False: d = None
                row.append(d)
            writer.writerow(row)
        fp.seek(0)
        data = fp.read()
        fp.close()
        return data
class ExcelExport(ExportFormat, http.Controller):
    """XLS implementation of the export format interface (requires xlwt)."""
    _cp_path = '/web/export/xls'
    fmt = {
        'tag': 'xls',
        'label': 'Excel',
        'error': None if xlwt else "XLWT required"
    }
    @property
    def content_type(self):
        return 'application/vnd.ms-excel'
    def filename(self, base):
        return base + '.xls'
    def from_data(self, fields, rows):
        """Serializes headers and rows into a single-sheet XLS workbook."""
        workbook = xlwt.Workbook()
        worksheet = workbook.add_sheet('Sheet 1')
        for i, fieldname in enumerate(fields):
            worksheet.write(0, i, fieldname)
            worksheet.col(i).width = 8000 # around 220 pixels
        style = xlwt.easyxf('align: wrap yes')
        for row_index, row in enumerate(rows):
            for cell_index, cell_value in enumerate(row):
                if isinstance(cell_value, basestring):
                    # Strip carriage returns, which confuse Excel.
                    cell_value = re.sub("\r", " ", cell_value)
                if cell_value is False: cell_value = None
                worksheet.write(row_index + 1, cell_index, cell_value, style)
        fp = StringIO()
        workbook.save(fp)
        fp.seek(0)
        data = fp.read()
        fp.close()
        return data
class Reports(openerpweb.Controller):
    """Launches report generation and streams the result back as a file."""
    _cp_path = "/web/report"
    # Delay (seconds) between polls of the report service.
    POLLING_DELAY = 0.25
    # Report format -> response MIME type.
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }
    @openerpweb.httprequest
    def index(self, req, action, token):
        """Runs the report described by the JSON-encoded ``action``, polls
        until it is ready, and returns it as a download response."""
        action = simplejson.loads(action)
        report_srv = req.session.proxy("report")
        context = dict(req.context)
        context.update(action["context"])
        report_data = {}
        report_ids = context["active_ids"]
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        if 'datas' in action:
            if 'ids' in action['datas']:
                report_ids = action['datas'].pop('ids')
            report_data.update(action['datas'])
        report_id = report_srv.report(
            req.session._db, req.session._uid, req.session._password,
            action["report_name"], report_ids,
            report_data, context)
        report_struct = None
        # Poll the report service until generation completes.
        while True:
            report_struct = report_srv.report_get(
                req.session._db, req.session._uid, req.session._password, report_id)
            if report_struct["state"]:
                break
            time.sleep(self.POLLING_DELAY)
        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            report = zlib.decompress(report)
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'octet-stream')
        file_name = action.get('name', 'report')
        if 'name' not in action:
            # Derive a file name from the report definition when possible.
            reports = req.session.model('ir.actions.report.xml')
            res_id = reports.search([('report_name', '=', action['report_name']),],
                                    0, False, False, context)
            if len(res_id) > 0:
                file_name = reports.read(res_id[0], ['name'], context)['name']
            else:
                file_name = action['report_name']
        file_name = '%s.%s' % (file_name, report_struct['format'])
        return req.make_response(report,
             headers=[
                 ('Content-Disposition', content_disposition(file_name, req)),
                 ('Content-Type', report_mimetype),
                 ('Content-Length', len(report))],
             cookies={'fileToken': token})
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| aricchen/openHR | openerp/addons/web/controllers/main.py | Python | agpl-3.0 | 68,469 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
try:
    reload           # Python 2: reload is a builtin
except NameError:    # Python 3: reload lives in importlib
    from importlib import reload
def test_engine_import():
    """Reload mxnet under each supported MXNET_ENGINE_TYPE value to verify
    the library imports cleanly with every engine (and with the variable
    unset, which selects the default engine)."""
    import mxnet

    engine_types = ['', 'NaiveEngine', 'ThreadedEngine', 'ThreadedEnginePerDevice']
    # Renamed loop variable: the original shadowed the builtin 'type'.
    for engine_type in engine_types:
        if engine_type:
            os.environ['MXNET_ENGINE_TYPE'] = engine_type
        else:
            # Empty entry means "default engine": the variable must be
            # absent, not set to an empty string.
            os.environ.pop('MXNET_ENGINE_TYPE', None)
        reload(mxnet)
| zhreshold/mxnet | tests/python/unittest/test_engine_import.py | Python | apache-2.0 | 1,215 |
"""
LLDB Formatters for LLVM data types.
Load into LLDB with 'command script import /path/to/lldbDataFormatters.py'
"""
def __lldb_init_module(debugger, internal_dict):
    """Registers the 'llvm' type category and attaches the SmallVector
    synthetic-children provider to SmallVectorImpl and SmallVector."""
    commands = (
        'type category define -e llvm -l c++',
        'type synthetic add -w llvm '
        '-l lldbDataFormatters.SmallVectorSynthProvider '
        '-x "^llvm::SmallVectorImpl<.+>$"',
        'type synthetic add -w llvm '
        '-l lldbDataFormatters.SmallVectorSynthProvider '
        '-x "^llvm::SmallVector<.+,.+>$"',
    )
    for command in commands:
        debugger.HandleCommand(command)
# Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl
class SmallVectorSynthProvider:
    """Synthetic-children provider exposing SmallVector elements to the
    debugger as indexed children ('[0]', '[1]', ...)."""
    def __init__(self, valobj, dict):
        self.valobj = valobj
        self.update()  # initialize this provider
    def num_children(self):
        begin = self.begin.GetValueAsUnsigned(0)
        end = self.end.GetValueAsUnsigned(0)
        # Floor division so an int is returned under Python 3 as well;
        # lldb expects an integral child count.
        return (end - begin) // self.type_size
    def get_child_index(self, name):
        """Maps a child name like '[3]' to its index, or -1 if invalid."""
        try:
            return int(name.lstrip('[').rstrip(']'))
        except (ValueError, AttributeError):
            # Non-numeric or non-string name: not one of our children.
            return -1
    def get_child_at_index(self, index):
        # Do bounds checking.
        if index < 0:
            return None
        if index >= self.num_children():
            return None
        offset = index * self.type_size
        return self.begin.CreateChildAtOffset('['+str(index)+']',
                                              offset, self.data_type)
    def update(self):
        """Re-reads BeginX/EndX and the element type from the value object."""
        self.begin = self.valobj.GetChildMemberWithName('BeginX')
        self.end = self.valobj.GetChildMemberWithName('EndX')
        the_type = self.valobj.GetType()
        # If this is a reference type we have to dereference it to get to the
        # template parameter.
        if the_type.IsReferenceType():
            the_type = the_type.GetDereferencedType()
        self.data_type = the_type.GetTemplateArgumentType(0)
        self.type_size = self.data_type.GetByteSize()
        assert self.type_size != 0
| chubbymaggie/asap | utils/lldbDataFormatters.py | Python | bsd-2-clause | 2,056 |
import os
import sys
import numpy as np
import nibabel as nb
import matplotlib.pyplot as plt
from glob import glob as gg
from os.path import join as opj
from os.path import basename as opb
from nilearn.image import smooth_img
from nilearn.plotting import plot_roi
from nilearn.plotting.find_cuts import find_cut_slices
from nibabel.nifti1 import Nifti1Image
def getEqualSpacing(dirMin, dirMax, ortho, nCuts):
    """
    Computes ``nCuts`` cut coordinates with equal spacing along a given
    direction.

    Parameters
    ----------
    dirMin, dirMax : sequence of float
        Minimum and maximum world coordinates (x, y, z) of the volume.
    ortho : {'x', 'y', 'z'}
        Direction along which the cuts are placed.
    nCuts : int
        Number of cut coordinates to return.

    Returns
    -------
    numpy.ndarray
        Array of ``nCuts`` cut coordinates.

    Raises
    ------
    ValueError
        If ``ortho`` is not one of 'x', 'y' or 'z' (previously this
        surfaced as an obscure NameError).
    """
    axes = {'x': 0, 'y': 1, 'z': 2}
    if ortho not in axes:
        raise ValueError("ortho must be 'x', 'y' or 'z', got %r" % (ortho,))
    idx = axes[ortho]
    # Step from dirMin towards dirMax, whatever the sign of the extent.
    sign = -1.0 * np.sign(dirMin[idx])
    stepsize = sign * int(np.abs(dirMin[idx] - dirMax[idx]) / (nCuts + 1))
    cut_order = np.arange(dirMin[idx], dirMax[idx], stepsize)
    if len(cut_order) == nCuts + 2:
        # The step divided the range evenly: drop both boundary cuts.
        cut_coords = cut_order[1:-1]
    else:
        # One candidate short: drop the cut furthest from the origin.
        cut_coords = np.delete(cut_order, np.argmax(np.abs(cut_order)))
    return cut_coords
def plotGlassbrainSlices(niftipath, mnipath, ortho='z', nRows=2, nCuts=6,
                         threshpos=0, threshneg=0, figLayout='Both',
                         showLRannot=True, findOptimalCut=True,
                         imageType='svg'):
    """
    Creates nice glassbrain slice figures in the direction x, y and z.

    Parameters
    ----------
    niftipath : str
        Path to the (3D or 4D) NIfTI volume to plot; a 4D volume is
        collapsed to 3D by dropping the last axis.
    mnipath : str
        Path to the MNI template used for the brain outline contours.
    ortho : {'x', 'y', 'z'}
        Slicing direction.
    nRows, nCuts : int
        Number of figure rows and total number of cuts spread over them.
    threshpos, threshneg : numeric
        Thresholds below/above which positive/negative values are hidden.
    figLayout : {'Both', 'Brain', 'Number'}
        Whether to add an overview brain, coordinate annotations, or both.
    showLRannot : bool
        Whether to annotate left/right on the slices.
    findOptimalCut : bool
        Use nilearn's automatic cut placement instead of equal spacing.
    imageType : str
        File extension of the saved figure (e.g. 'svg', 'png').

    The figure is written to a 'figures' folder next to the input file.
    """
    # Initiation of relevant parameters
    img = nb.load(niftipath)
    lineW = 2. / (nRows + int((figLayout == 'Brain' or figLayout == 'Both')))
    # Reduce 4D volume to 3D
    if len(img.shape) == 4:
        data4D = img.get_data()
        data4D = data4D.reshape(data4D.shape[:-1])
        img = Nifti1Image(data4D, img.get_affine())
    # Get voxel extend in all directions
    dirMin = np.dot(img.get_affine(), [0, 0, 0, 1])[:3]
    dirMax = np.dot(img.get_affine(),
                    np.array(img.shape).tolist() + [1])[:3]
    if findOptimalCut:
        # Find cuts automatically
        cut_coords = find_cut_slices(img, direction=ortho, n_cuts=nCuts)
    else:
        # Split orientation in x-equal parts
        cut_coords = getEqualSpacing(dirMin, dirMax, ortho, nCuts)
    # Split cuts according nRows
    cut_coords = [cut_coords[int(i * len(cut_coords) / np.float(nRows)):
                             int((i + 1) * len(cut_coords) / np.float(nRows))]
                  for i in range(nRows)]
    # Create Slices
    for i in range(nRows):
        # Create axes for plotting
        ax = plt.subplot(nRows + int((figLayout == 'Brain' or
                                      figLayout == 'Both')),
                         1, i + 1)
        # Plot the white background for all slices as a zeros value brain
        # (without it, the view focuses around the first area plotted)
        zerobrain = Nifti1Image(img.get_data() * 0, img.get_affine())
        brain = plot_roi(
            zerobrain, zerobrain, colorbar=False, cut_coords=cut_coords[i],
            display_mode=ortho, alpha=1, draw_cross=False, cmap=plt.cm.gray,
            black_bg=False, axes=ax, annotate=False)
        # Plot positive values
        posdata = np.copy(img.get_data())
        posdata[posdata <= threshpos] = 0.001  # = 0 crashes contour function
        posbrain = Nifti1Image(posdata, img.get_affine())
        brain.add_contours(
            posbrain, filled=False, cmap=plt.cm.hot, alpha=1, linewidths=lineW)
        # Plot negative values
        negdata = np.copy(img.get_data())
        negdata[negdata >= -threshneg] = 0.001  # = 0 crashes contour function
        negbrain = Nifti1Image(negdata, img.get_affine())
        brain.add_contours(
            negbrain, filled=False, cmap=plt.cm.winter, alpha=1,
            linewidths=lineW)
        # Plot outer MNI contours
        brain.add_contours(
            smooth_img(mnipath, 4), alpha=1, filled=False,
            levels=[100], linewidths=lineW, cmap=plt.cm.gray)
        # Plot inner MNI contours
        brain.add_contours(
            nb.load(mnipath), alpha=0.8, levels=[5000], linewidths=lineW,
            cmap=plt.cm.gray)
        # Add annotation if requested
        if figLayout == 'Both' or figLayout == 'Number':
            brain.annotate(left_right=showLRannot, size=int(12 * lineW))
    # Plot overview Brain at the bottom
    if figLayout == 'Brain' or figLayout == 'Both':
        # Create axes for overview brain
        ax = plt.subplot(nRows + 1, 1, nRows + 1)
        # Find overview view direction (orthogonal to the slicing direction)
        if ortho == 'z':
            direction = 'x'
        elif ortho == 'x':
            direction = 'z'
        elif ortho == 'y':
            direction = 'z'
        # Plot the white backgroundas a zeros value brain
        brain = plot_roi(
            zerobrain, zerobrain, colorbar=False, cut_coords=[0],
            display_mode=direction, alpha=1, draw_cross=False,
            cmap=plt.cm.gray, black_bg=False, axes=ax, annotate=False)
        # Plot positive values
        brain.add_contours(
            posbrain, filled=False, cmap=plt.cm.hot, alpha=1, linewidths=lineW)
        # Plot negative values
        brain.add_contours(
            negbrain, filled=False, cmap=plt.cm.winter, alpha=1,
            linewidths=lineW)
        # Plot outer MNI contours
        brain.add_contours(
            smooth_img(mnipath, 4), alpha=1, filled=False,
            levels=[100], linewidths=lineW, cmap=plt.cm.gray)
        # Plot inner MNI contours
        brain.add_contours(
            nb.load(mnipath), alpha=0.8, levels=[5000], linewidths=lineW,
            cmap=plt.cm.gray)
        # Plot the line indicating the cut
        for i in np.array(cut_coords).flatten():
            if ortho == 'z' or ortho == 'y':
                ax.plot([-100, 100], [i, i], 'k-', lw=lineW)
            elif ortho == 'x':
                ax.plot([i, i], [-100, 100], 'k-', lw=lineW)
        # Fix the visible extent of the overview axes per direction.
        if ortho == 'z':
            ax.axis((-300.0, 300.0, dirMin[2], dirMax[2]))
        elif ortho == 'y':
            ax.axis((-300.0, 300.0, dirMin[1], dirMax[1]))
        elif ortho == 'x':
            stretcher = (nRows + 1) / 2.
            ax.axis((-300.0 * stretcher, 300.0 * stretcher, -100.0, 100.0))
        # Add annotation if requested
        if figLayout == 'Both' or figLayout == 'Number':
            brain.annotate(left_right=showLRannot, size=int(12 * lineW))
    # Get file prefix
    if niftipath.endswith('.nii'):
        filename = opb(niftipath)[:-4]
    elif niftipath.endswith('.nii.gz'):
        filename = opb(niftipath)[:-7]
    # Create output folder
    path2Figure = opj(os.path.split(os.path.realpath(niftipath))[0], 'figures')
    if not os.path.exists(opj(path2Figure)):
        os.makedirs(opj(path2Figure))
    # Save figure
    figname = '_'.join([filename, '%s-cut' % ortho])
    plt.savefig(opj(path2Figure, '%s.%s' % (figname, imageType)))
    plt.clf()
if __name__ == "__main__":
    # Command-line interface; positional argv layout (no argparse):
    #   1 niftipath  2 mnipath  3 ortho(s)  4 nRows  5 nCuts
    #   6 showLRannot(0/1)  7 figLayout  8 threshpos  9 threshneg
    #   10 findOptimalCut(0/1)  11 imageType
    niftipath = str(sys.argv[1])
    mnipath = str(sys.argv[2])
    ortho = str(sys.argv[3])
    nRows = int(sys.argv[4])
    nCuts = int(sys.argv[5])
    showLRannot = bool(int(sys.argv[6]))
    figLayout = str(sys.argv[7])
    threshpos = int(sys.argv[8])
    threshneg = int(sys.argv[9])
    findOptimalCut = bool(int(sys.argv[10]))
    imageType = str(sys.argv[11])
    # Go through all the files in the data folder if requested
    if niftipath == 'data':
        fileList = gg('data/*.nii*')
        for fpath in fileList:
            # 'ortho' may contain several letters (e.g. 'xyz'): one plot each.
            for o in list(ortho):
                plotGlassbrainSlices(
                    fpath, mnipath, o, nRows, nCuts, threshpos, threshneg,
                    figLayout, showLRannot, findOptimalCut, imageType)
    else:
        for o in list(ortho):
            plotGlassbrainSlices(
                niftipath, mnipath, o, nRows, nCuts, threshpos, threshneg,
                figLayout, showLRannot, findOptimalCut, imageType)
from distutils.version import StrictVersion
import datetime
import hashlib
import os
import random
import re
import socket
import shutil
import time
import sys
import seesaw
from seesaw.config import realize, NumberConfigValue
from seesaw.externalprocess import WgetDownload, ExternalProcess
from seesaw.item import ItemInterpolation, ItemValue
from seesaw.pipeline import Pipeline
from seesaw.project import Project
from seesaw.task import SimpleTask, SetItemKey, LimitConcurrent
from seesaw.tracker import PrepareStatsForTracker, GetItemFromTracker, \
UploadWithTracker, SendDoneToTracker
from seesaw.util import find_executable
# check the seesaw version
# Abort immediately if the installed seesaw library is older than the
# minimum this pipeline definition relies on.
if StrictVersion(seesaw.__version__) < StrictVersion("0.8.3"):
    raise Exception("This pipeline needs seesaw version 0.8.3 or higher.")
###########################################################################
# Find a useful Wpull executable.
#
# WPULL_EXE will be set to the first path that
# 1. does not crash with --version, and
# 2. prints the required version string
WPULL_EXE = find_executable(
    "Wpull",
    re.compile(r"\b1\.0\b"),
    [
        "./wpull",
        os.path.expanduser("~/.local/share/wpull-1.0/wpull"),
        os.path.expanduser("~/.local/bin/wpull"),
        "./wpull_bootstrap",
        "wpull",
    ]
)
if not WPULL_EXE:
    raise Exception("No usable Wpull found.")
###########################################################################
# The version number of this pipeline definition.
#
# Update this each time you make a non-cosmetic change.
# It will be added to the WARC files and reported to the tracker.
VERSION = "20150321.03"
# Identification sent with every HTTP request and to the tracker.
USER_AGENT = 'ArchiveTeam'
TRACKER_ID = 'madden-giferator'
TRACKER_HOST = 'tracker.archiveteam.org'
###########################################################################
# This section defines project-specific tasks.
#
# Simple tasks (tasks that do not need any concurrency) are based on the
# SimpleTask class and have a process(item) method that is called for
# each item.
class CheckIP(SimpleTask):
    """Detect captive portals / transparent proxies via DNS sanity checks.

    Resolves six well-known hostnames and fails the item unless they map
    to six distinct addresses (a firewall or proxy would collapse them).
    The check runs on the first item and then once every ten items.
    """

    def __init__(self):
        SimpleTask.__init__(self, "CheckIP")
        self._counter = 0

    def process(self, item):
        if self._counter <= 0:
            item.log_output('Checking IP address.')
            hostnames = (
                'twitter.com',
                'facebook.com',
                'youtube.com',
                'microsoft.com',
                'icanhas.cheezburger.com',
                'archiveteam.org',
            )
            ip_set = {socket.gethostbyname(name) for name in hostnames}
            if len(ip_set) != 6:
                item.log_output('Got IP addresses: {0}'.format(ip_set))
                item.log_output(
                    'You are behind a firewall or proxy. That is a big no-no!')
                raise Exception(
                    'You are behind a firewall or proxy. That is a big no-no!')
        # Check only occasionally
        if self._counter <= 0:
            self._counter = 10
        else:
            self._counter -= 1
class PrepareDirectories(SimpleTask):
    """Create a scratch directory for an item and pre-create its WARC file.

    The item name is SHA-1 hashed to obtain a filesystem-safe directory
    name; any stale directory left over from a previous attempt is removed.
    """

    def __init__(self, warc_prefix):
        SimpleTask.__init__(self, "PrepareDirectories")
        self.warc_prefix = warc_prefix

    def process(self, item):
        # Hash rather than escape the item name: always safe on disk.
        escaped_item_name = hashlib.sha1(
            item["item_name"].encode('utf8')).hexdigest()
        item['escaped_item_name'] = escaped_item_name

        dirname = os.path.join(item["data_dir"], escaped_item_name)
        if os.path.isdir(dirname):
            # Drop leftovers from an earlier, failed run of this item.
            shutil.rmtree(dirname)
        os.makedirs(dirname)
        item["item_dir"] = dirname

        item["warc_file_base"] = "%s-%s-%s" % (
            self.warc_prefix, escaped_item_name,
            time.strftime("%Y%m%d-%H%M%S"))
        # Touch the WARC file so the downloader appends to a known path.
        open("%(item_dir)s/%(warc_file_base)s.warc.gz" % item, "w").close()
class MoveFiles(SimpleTask):
    """Move the finished WARC out of the scratch dir, then delete the dir."""

    def __init__(self):
        SimpleTask.__init__(self, "MoveFiles")

    def process(self, item):
        # Check if wget was compiled with zlib support: an *uncompressed*
        # .warc file on disk means gzip output was unavailable.
        uncompressed = "%(item_dir)s/%(warc_file_base)s.warc" % item
        if os.path.exists(uncompressed):
            raise Exception('Please compile wget with zlib support!')

        source = "%(item_dir)s/%(warc_file_base)s.warc.gz" % item
        target = "%(data_dir)s/%(warc_file_base)s.warc.gz" % item
        os.rename(source, target)
        shutil.rmtree("%(item_dir)s" % item)
def get_hash(filename):
    """Return the SHA-1 hex digest of the file at *filename*.

    Reads the file in fixed-size chunks and feeds them to the hash
    incrementally, so arbitrarily large files never have to fit in
    memory at once (the original read the whole file in one call).
    """
    digest = hashlib.sha1()
    with open(filename, 'rb') as in_file:
        # iter() with a sentinel yields 1 MiB chunks until EOF (b'').
        for chunk in iter(lambda: in_file.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest()
# Hashes of this pipeline definition and the wpull hook script.  They are
# reported to the tracker (see stats_id_function) so every upload can be
# tied to the exact code that produced it.
CWD = os.getcwd()
PIPELINE_SHA1 = get_hash(os.path.join(CWD, 'pipeline.py'))
SCRIPT_SHA1 = get_hash(os.path.join(CWD, 'madden_giferator.py'))
def stats_id_function(item):
    """Return accountability metadata sent with the tracker stats.

    *item* is accepted to satisfy the seesaw id_function signature but is
    not used; the payload identifies the exact code and interpreter.
    """
    return {
        'pipeline_hash': PIPELINE_SHA1,
        'script_hash': SCRIPT_SHA1,
        'python_version': sys.version,
    }
class WgetArgs(object):
    """Builds the wpull command line for one tracker item.

    Realized per item by seesaw's WgetDownload task; item names look like
    ``gif:<id>[,<id>...]`` and each id yields the public GIF page plus the
    backing API record.
    """
    def realize(self, item):
        # Base invocation: page fetches driven by the madden_giferator.py
        # hook script, written into a gzipped per-item WARC.  The commented
        # options are deliberately retained as tuning knobs.
        wget_args = [
            WPULL_EXE,
            "-nv",
            "--user-agent", USER_AGENT,
            "--python-script", "madden_giferator.py",
            "-o", ItemInterpolation("%(item_dir)s/wpull.log"),
            "--no-check-certificate",
            "--database", ItemInterpolation("%(item_dir)s/wpull.db"),
            "--delete-after",
            "--no-robots",
            "--no-cookies",
            "--rotate-dns",
            # "--recursive", "--level=inf",
            # "--recursive", "--level=2",
            "--no-parent",
            # "--page-requisites",
            # "--span-hosts-allow", "page-requisites,linked-pages",
            # "--span-hosts-allow", "page-requisites",
            "--timeout", "60",
            "--tries", "inf",
            "--wait", "0.2",
            "--random-wait",
            "--waitretry", "60",
            "--retry-connrefused",
            "--retry-dns-error",
            # "--domains", "example.com,example.net",
            # "--hostnames", "assets.cloudspeeder.invalid,cnd.wahoo.invalid",
            "--warc-file", ItemInterpolation("%(item_dir)s/%(warc_file_base)s"),
            "--warc-header", "operator: Archive Team",
            "--warc-header", "madden-giferator-dld-script-version: " + VERSION,
            "--warc-header", ItemInterpolation("madden-gifferator-user: %(item_name)s"),
            "--no-skip-getaddrinfo", # Use OS DNS resolver only
        ]
        # Occasionally grab some of the assets
        # (10% sampling keeps total volume down while still archiving them).
        if random.random() < 0.1:
            wget_args.extend([
                "--recursive", "--level=1",
                "--page-requisites",
                "--span-hosts-allow", "page-requisites",
            ])
        item_type, item_value = item['item_name'].split(':', 1)
        if item_type != 'gif':
            raise Exception('Unexpected item type {0}'.format(item_type))
        gif_numbers = item_value.split(',')
        for gif_number in gif_numbers:
            # Public GIF page plus the backing API record for each id.
            wget_args.append("http://giferator.easports.com/gif/{0}".format(gif_number))
            wget_args.append("http://prod-api-madden.grw.io/api/memes/meme-id/{0}".format(gif_number))
        # NOTE(review): `bind_address` is expected to be injected into the
        # module globals by the seesaw runner when configured -- confirm.
        if 'bind_address' in globals():
            wget_args.extend(['--bind-address', globals()['bind_address']])
            print('')
            print('*** Wget will bind address at {0} ***'.format(
                globals()['bind_address']))
            print('')
        return realize(wget_args, item)
###########################################################################
# Initialize the project.
#
# This will be shown in the warrior management panel. The logo should not
# be too big. The deadline is optional.
project = Project(
title="Madden GIFERATOR",
project_html="""
<img class="project-logo" alt="Project logo" src="http://archiveteam.org/images/5/5f/Giferatorlogo.png" height="50px" title=""/>
<h2>Madden GIFERATOR
<span class="links">
<a href="http://giferator.easports.com/">Website</a> ·
<a href="http://tracker.archiveteam.org/madden-giferator">Leaderboard</a> ·
<a href="http://archiveteam.org/index.php?title=Madden_GIFERATOR">Wiki</a>
</span>
</h2>
<p>Saving the GIFs.</p>
<!--<p class="projectBroadcastMessage"></p>-->
""",
# utc_deadline=datetime.datetime(2000, 1, 1, 23, 59, 0)
)
# The per-item task chain: sanity-check the network, fetch an item name,
# prepare directories, download with wpull, report stats, move the WARC,
# upload it (rsync, concurrency-limited), and confirm completion.
#
# NOTE(review): `downloader` is not defined anywhere in this file; the
# seesaw runner appears to inject it into the module globals at load
# time -- confirm before running this module standalone.
pipeline = Pipeline(
    CheckIP(),
    GetItemFromTracker("http://%s/%s" % (TRACKER_HOST, TRACKER_ID), downloader,
                       VERSION),
    PrepareDirectories(warc_prefix="madden-giferator"),
    # Exit codes 4/7/8 are wpull network/protocol errors we still accept.
    WgetDownload(
        WgetArgs(),
        max_tries=1,
        accept_on_exit_code=[0, 4, 7, 8],
        env={
            "item_dir": ItemValue("item_dir"),
            "downloader": downloader
        }
    ),
    PrepareStatsForTracker(
        defaults={"downloader": downloader, "version": VERSION},
        file_groups={
            "data": [
                ItemInterpolation("%(item_dir)s/%(warc_file_base)s.warc.gz"),
            ]
        },
        id_function=stats_id_function,
    ),
    MoveFiles(),
    # Bound concurrent rsync uploads via a shared, user-tunable setting.
    LimitConcurrent(
        NumberConfigValue(min=1, max=4, default="1",
                          name="shared:rsync_threads", title="Rsync threads",
                          description="The maximum number of concurrent uploads."),
        UploadWithTracker(
            "http://%s/%s" % (TRACKER_HOST, TRACKER_ID),
            downloader=downloader,
            version=VERSION,
            files=[
                ItemInterpolation("%(data_dir)s/%(warc_file_base)s.warc.gz"),
            ],
            rsync_target_source_path=ItemInterpolation("%(data_dir)s/"),
            rsync_extra_args=[
                "--recursive",
                "--partial",
                "--partial-dir", ".rsync-tmp",
            ]
        ),
    ),
    SendDoneToTracker(
        tracker_url="http://%s/%s" % (TRACKER_HOST, TRACKER_ID),
        stats=ItemValue("stats")
    )
)
| ArchiveTeam/madden-giferator-grab | pipeline.py | Python | unlicense | 10,105 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from ._configuration import MonitorManagementClientConfiguration
from .operations import ActionGroupsOperations
from .operations import BaselinesOperations
from . import models
class MonitorManagementClient(object):
    """Monitor Management Client.

    :ivar action_groups: ActionGroupsOperations operations
    :vartype action_groups: $(python-base-namespace).v2019_03_01.operations.ActionGroupsOperations
    :ivar baselines: BaselinesOperations operations
    :vartype baselines: $(python-base-namespace).v2019_03_01.operations.BaselinesOperations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        base_url=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Fall back to the public ARM endpoint when no base URL is given.
        base_url = base_url or 'https://management.azure.com'
        self._config = MonitorManagementClientConfiguration(
            credential, subscription_id, **kwargs)
        self._client = ARMPipelineClient(
            base_url=base_url, config=self._config, **kwargs)

        # The (de)serializers share this API version's model catalogue.
        model_classes = {
            name: value
            for name, value in models.__dict__.items()
            if isinstance(value, type)
        }
        self._serialize = Serializer(model_classes)
        self._serialize.client_side_validation = False
        self._deserialize = Deserializer(model_classes)

        self.action_groups = ActionGroupsOperations(
            self._client, self._config, self._serialize, self._deserialize)
        self.baselines = BaselinesOperations(
            self._client, self._config, self._serialize, self._deserialize)

    def _send_request(self, http_request, **kwargs):
        # type: (HttpRequest, Any) -> HttpResponse
        """Runs the network request through the client's chained policies.

        :param http_request: The network request you want to make. Required.
        :type http_request: ~azure.core.pipeline.transport.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.pipeline.transport.HttpResponse
        """
        url_arguments = {
            'subscriptionId': self._serialize.url(
                "self._config.subscription_id",
                self._config.subscription_id, 'str', min_length=1),
        }
        http_request.url = self._client.format_url(
            http_request.url, **url_arguments)
        stream = kwargs.pop("stream", True)
        pipeline_response = self._client._pipeline.run(
            http_request, stream=stream, **kwargs)
        return pipeline_response.http_response

    def close(self):
        # type: () -> None
        """Close the underlying pipeline client and its transport."""
        self._client.close()

    def __enter__(self):
        # type: () -> MonitorManagementClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
| Azure/azure-sdk-for-python | sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2019_03_01/_monitor_management_client.py | Python | mit | 4,036 |
import functools
import os
import subprocess
import threading
import time

try:
    # Python 2 low-level thread module; no longer used directly, kept only
    # for backward compatibility with any external references.
    import thread
except ImportError:
    import threading

import sublime
class AsyncProcess(object):
    """Run a shell command asynchronously, streaming output to a listener.

    The listener must provide:
      * ``is_running`` -- boolean attribute; cleared to stop the readers,
      * ``append_data(proc, data)`` -- invoked on Sublime's main thread
        with each chunk of stdout/stderr,
      * ``proc_terminated(proc)`` -- invoked on the main thread at exit.

    Bug fix: the original used ``thread.start_new_thread`` when the
    Python 2 ``thread`` module was importable, which never created the
    ``pollThread``/``stdoutThread``/``stderrThread`` attributes that
    ``terminate()`` joins -- so ``terminate()`` raised AttributeError on
    that path.  ``threading.Thread`` exists on both Python 2 and 3, so it
    is now used unconditionally and the attributes always exist.
    """

    def __init__(self, cmd, listener):
        self.cmd = cmd
        self.listener = listener
        #print("DEBUG_EXEC: " + str(self.cmd))
        # NOTE(review): shell=True hands `cmd` to the shell for parsing;
        # it must come from a trusted source (build-system config here).
        self.proc = subprocess.Popen(self.cmd, shell=True,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
        self.stdoutThread = threading.Thread(target=self.read_stdout)
        self.stdoutThread.start()
        self.stderrThread = threading.Thread(target=self.read_stderr)
        self.stderrThread.start()
        self.pollThread = threading.Thread(target=self.poll)
        self.pollThread.start()

    def poll(self):
        """Watch the subprocess; schedule terminate() once it exits."""
        while True:
            if self.proc.poll() is not None:
                sublime.set_timeout(functools.partial(self.terminate), 0)
                break
            time.sleep(0.1)

    def read_stdout(self):
        """Pump stdout chunks to the listener until it stops us."""
        while self.listener.is_running:
            data = os.read(self.proc.stdout.fileno(), 2**15)
            if data != b'':
                sublime.set_timeout(functools.partial(
                    self.listener.append_data, self.proc, data), 0)

    def read_stderr(self):
        """Pump stderr chunks to the listener until it stops us."""
        while self.listener.is_running:
            data = os.read(self.proc.stderr.fileno(), 2**15)
            if data != b'':
                sublime.set_timeout(functools.partial(
                    self.listener.append_data, self.proc, data), 0)

    def terminate(self):
        """Notify the listener, stop the worker threads, close the pipes.

        Runs on Sublime's main thread (scheduled by poll()), so joining
        the worker threads here cannot deadlock on itself.
        """
        sublime.set_timeout(functools.partial(
            self.listener.proc_terminated, self.proc), 0)
        self.listener.is_running = False
        self.pollThread.join()
        self.stdoutThread.join()
        self.proc.stdout.close()
        self.stderrThread.join()
        self.proc.stderr.close()
#!/usr/bin/env python
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dual Moving Average Crossover algorithm.
This algorithm buys apple once its short moving average crosses
its long moving average (indicating upwards momentum) and sells
its shares once the averages cross again (indicating downwards
momentum).
"""
# Import exponential moving average from talib wrapper
from zipline.transforms.ta import EMA
def initialize(context):
    """Set up algorithm state: the traded asset, the EMA pair, and a flag.

    Called once by zipline before the first bar.
    """
    context.asset = symbol('AAPL')
    # Two exponential moving averages; their crossover drives the trades.
    context.long_ema_trans = EMA(timeperiod=40)
    context.short_ema_trans = EMA(timeperiod=20)
    # Start flat: no shares held yet.
    context.invested = False
def handle_data(context, data):
    """Trade on EMA crossovers and record per-bar diagnostics.

    Buys 100 shares when the short EMA is above the long EMA while flat;
    sells them back once the short EMA drops below while invested.
    """
    fast = context.short_ema_trans.handle_data(data)
    slow = context.long_ema_trans.handle_data(data)
    # The transforms need a warm-up window; skip bars until both exist.
    if fast is None or slow is None:
        return

    bought = False
    sold = False
    if not context.invested and (fast > slow).all():
        order(context.asset, 100)
        context.invested = True
        bought = True
    elif context.invested and (fast < slow).all():
        order(context.asset, -100)
        context.invested = False
        sold = True

    record(AAPL=data[context.asset].price,
           short_ema=fast[context.asset],
           long_ema=slow[context.asset],
           buy=bought,
           sell=sold)
if __name__ == '__main__':
    # Standalone entry point: backtest the algorithm on daily Yahoo data,
    # then plot the equity curve and the EMA crossover signals.
    from datetime import datetime
    import logbook
    import matplotlib.pyplot as plt
    import pytz
    from zipline.algorithm import TradingAlgorithm
    from zipline.api import order, record, symbol
    from zipline.utils.factory import load_from_yahoo
    logbook.StderrHandler().push_application()
    # Backtest window; zipline requires timezone-aware datetimes.
    start = datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc)
    end = datetime(2014, 11, 1, 0, 0, 0, 0, pytz.utc)
    data = load_from_yahoo(stocks=['AAPL'], indexes={}, start=start,
                           end=end)
    algo = TradingAlgorithm(initialize=initialize, handle_data=handle_data,
                            identifiers=['AAPL'])
    results = algo.run(data).dropna()
    # Top panel: portfolio value; bottom: price, EMAs and trade markers.
    fig = plt.figure()
    ax1 = fig.add_subplot(211, ylabel='portfolio value')
    results.portfolio_value.plot(ax=ax1)
    ax2 = fig.add_subplot(212)
    results[['AAPL', 'short_ema', 'long_ema']].plot(ax=ax2)
    # NOTE(review): DataFrame.ix is deprecated/removed in modern pandas
    # (.loc is the replacement) -- left as-is for this zipline era.
    ax2.plot(results.ix[results.buy].index, results.short_ema[results.buy],
             '^', markersize=10, color='m')
    ax2.plot(results.ix[results.sell].index, results.short_ema[results.sell],
             'v', markersize=10, color='k')
    plt.legend(loc=0)
    plt.gcf().set_size_inches(18, 8)
    plt.show()
| DVegaCapital/zipline | zipline/examples/dual_ema_talib.py | Python | apache-2.0 | 3,247 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.