repo_name : stringlengths (6 to 100)
path      : stringlengths (4 to 294)
copies    : stringclasses (981 values)
size      : stringlengths (4 to 6)
content   : stringlengths (606 to 896k)
license   : stringclasses (15 values)
geotagx/geotagx-pybossa-archive
pybossa/auth/task.py
1
1535
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.

from flask.ext.login import current_user
import pybossa.model as model
from pybossa.core import db


def create(task=None):
    if not current_user.is_anonymous():
        app = db.session.query(model.App).filter_by(id=task.app_id).one()
        if app.owner_id == current_user.id or current_user.admin is True:
            return True
        else:
            return False
    else:
        return False


def read(task=None):
    return True


def update(task):
    if not current_user.is_anonymous():
        app = db.session.query(model.App).filter_by(id=task.app_id).one()
        if app.owner_id == current_user.id or current_user.admin is True:
            return True
        else:
            return False
    else:
        return False


def delete(task):
    return update(task)
agpl-3.0
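The module above follows a convention of one callable per action (create/read/update/delete). A minimal sketch of how such a module can be consulted generically (illustrative only, not necessarily PyBossa's actual wiring; the helper name is hypothetical):

# Sketch: dispatch an authorization check by action name.
from pybossa.auth import task as task_auth

def is_authorized(action, task):
    # action is one of 'create', 'read', 'update', 'delete'
    checker = getattr(task_auth, action)
    return checker(task)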
MasterGowen/moonrain
moonrain/accounts/models.py
1
2939
from django.db import models
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from ..projects.models import Project


class UserManager(BaseUserManager):

    def create_user(self, email, username, password=None):
        if not email:
            raise ValueError('Необходимо ввести электронный адрес')
        user = self.model(
            email=UserManager.normalize_email(email),
            username=username,
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, username, password):
        user = self.create_user(email,
                                password=password,
                                username=username)
        user.is_admin = True
        user.save(using=self._db)
        return user


class User(AbstractBaseUser):
    '''
    Пользователь
    '''
    email = models.EmailField(
        verbose_name='Электронная почта',
        max_length=32,
        unique=True,
        db_index=True,
    )
    username = models.CharField(
        verbose_name='Имя пользователя',
        blank=False,
        max_length=32,
        unique=True,
    )
    avatar = models.ImageField(
        verbose_name='Аватар',
        upload_to='images/%Y/%m',
        blank=True,
    )
    first_name = models.CharField(
        verbose_name='Имя',
        max_length=16,
        blank=True,
    )
    last_name = models.CharField(
        verbose_name='Фамилия',
        max_length=32,
        blank=True,
    )
    department = models.CharField(
        verbose_name='Подразделение',
        max_length=255,
        blank=True,
    )
    is_admin = models.BooleanField(
        verbose_name='Является администратором?',
        default=False,
    )
    is_superuser = models.BooleanField(
        verbose_name='Является суперпользователем?',
        default=False,
    )
    projects = models.ManyToManyField(Project,
                                      verbose_name='Проекты',
                                      blank=True,
                                      help_text='Проекты, в которых участвует пользователь',)

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username']

    objects = UserManager()

    def get_full_name(self):
        return '%s %s' % (self.last_name, self.first_name,)

    def get_short_name(self):
        return self.username

    def __str__(self):
        return self.email

    def has_perm(self, perm, obj=None):
        return True

    def has_module_perms(self, app_label):
        return True

    @property
    def is_staff(self):
        return self.is_admin

    class Meta:
        verbose_name = ('Пользователь')
        verbose_name_plural = ('Пользователи')
gpl-2.0
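A custom user model like the one above only takes effect when the Django settings point at it. A minimal sketch, assuming the app is installed under the label 'accounts' (inferred from the module path, not confirmed by the source):

# settings.py (sketch); 'accounts' is an assumed app label
AUTH_USER_MODEL = 'accounts.User'

# The manager defined above is then the normal way to create accounts, e.g.
# User.objects.create_superuser('admin@example.com', 'admin', 'password')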
weety/rt-thread
tools/rt_studio.py
6
34261
import os import re from string import Template import rtconfig import shutil # version MODULE_VER_NUM = 1 cproject_temp = """<?xml version="1.0" encoding="UTF-8" standalone="no"?> <?fileVersion 4.0.0?><cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage"> <storageModule moduleId="org.eclipse.cdt.core.settings"> <cconfiguration id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094"> <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094" moduleId="org.eclipse.cdt.core.settings" name="Debug"> <externalSettings/> <extensions> <extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/> <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> </extensions> </storageModule> <storageModule moduleId="cdtBuildSystem" version="4.0.0"> <configuration artifactName="rtthread" buildArtefactType="org.eclipse.cdt.build.core.buildArtefactType.exe" buildProperties="org.eclipse.cdt.build.core.buildArtefactType=org.eclipse.cdt.build.core.buildArtefactType.exe,org.eclipse.cdt.build.core.buildType=org.eclipse.cdt.build.core.buildType.debug" cleanCommand="${cross_rm} -rf" description="" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094" name="Debug" parent="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug"> <folderInfo id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094." 
name="/" resourcePath=""> <toolChain id="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug.1201710416" name="ARM Cross GCC" superClass="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash.251260409" name="Create flash image" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createlisting.1365878149" name="Create extended listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createlisting" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize.709136944" name="Print size" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.1986446770" name="Optimization Level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.none" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength.1312975261" name="Message length (-fmessage-length=0)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength" useByScannerDiscovery="true" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar.1538128212" name="'char' is signed (-fsigned-char)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar" useByScannerDiscovery="true" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections.2136804218" name="Function sections (-ffunction-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections" useByScannerDiscovery="true" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections.244767666" name="Data sections (-fdata-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections" useByScannerDiscovery="true" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.1055848773" name="Debug level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.default" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.501941135" name="Debug format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.dwarf2" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name.1696308067" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name" useByScannerDiscovery="false" value="GNU Tools for ARM Embedded Processors" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.architecture.1558403188" name="Architecture" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.architecture" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.architecture.arm" 
valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family.749415257" name="ARM family" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.mcpu.cortex-m4" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset.2114153533" name="Instruction set" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset.thumb" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix.1600865811" name="Prefix" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix" useByScannerDiscovery="false" value="arm-none-eabi-" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.c.1109963929" name="C compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.c" useByScannerDiscovery="false" value="gcc" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp.1040883831" name="C++ compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp" useByScannerDiscovery="false" value="g++" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.ar.1678200391" name="Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.ar" useByScannerDiscovery="false" value="ar" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy.1171840296" name="Hex/Bin converter" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy" useByScannerDiscovery="false" value="objcopy" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump.342604837" name="Listing generator" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump" useByScannerDiscovery="false" value="objdump" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.size.898269225" name="Size command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.size" useByScannerDiscovery="false" value="size" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.make.2016398076" name="Build command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.make" useByScannerDiscovery="false" value="make" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm.1606171496" name="Remove command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm" useByScannerDiscovery="false" value="rm" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.id.540792084" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.id" useByScannerDiscovery="false" value="1287942917" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.architecture.430121817" name="Architecture" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.architecture" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.arch.none" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.966735324" name="Float ABI" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi" useByScannerDiscovery="true" 
value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.hard" valueType="enumerated"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn.1381561249" name="Enable all common warnings (-Wall)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn" useByScannerDiscovery="true" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.target.other.2041717463" name="Other target flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.target.other" useByScannerDiscovery="true" value="" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.1463655269" name="FPU Type" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.fpv4spd16" valueType="enumerated"/> <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform.1798638225" isAbstract="false" osList="all" superClass="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform"/> <builder buildPath="${workspace_loc:/${ProjName}/Debug" cleanBuildTarget="clean2" id="ilg.gnuarmeclipse.managedbuild.cross.builder.1736709688" keepEnvironmentInBuildfile="false" managedBuildOn="true" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="optimal" superClass="ilg.gnuarmeclipse.managedbuild.cross.builder"/> <tool commandLinePattern="${COMMAND} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.1810966071" name="GNU ARM Cross Assembler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor.1072524326" name="Use preprocessor" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths.161242639" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths" useByScannerDiscovery="true"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs.1521934876" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs" useByScannerDiscovery="true"/> <option IS_BUILTIN_EMPTY="false" IS_VALUE_EMPTY="false" id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.flags.1325367962" name="Assembler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.flags" useByScannerDiscovery="false" valueType="stringList"> <listOptionValue builtIn="false" value="-mimplicit-it=thumb"/> </option> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.other.647856572" name="Other assembler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.other" useByScannerDiscovery="false" value="a_misc_flag" valueType="string"/> <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input.1843333483" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input"/> </tool> <tool commandLinePattern="${COMMAND} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1570350559" name="GNU ARM Cross C Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler"> <option 
id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths.634882052" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths" useByScannerDiscovery="true"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs.100549972" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs" useByScannerDiscovery="true"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.other.2133065240" name="Other compiler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.other" useByScannerDiscovery="true" value="c_misc_flag" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.files.714348818" name="Include files (-include)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.files" useByScannerDiscovery="true"/> <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.992053063" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input"/> </tool> <tool commandLinePattern="${COMMAND} ${FLAGS} ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.869072473" name="Cross ARM C Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections.1167322178" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostart.351692886" name="Do not use standard start files (-nostartfiles)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostart" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostdlibs.1009243715" name="No startup or default libs (-nostdlib)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostdlibs" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nodeflibs.2016026082" name="Do not use default libraries (-nodefaultlibs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nodeflibs" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.usenewlibnano.923990336" name="Use newlib-nano (--specs=nano.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.usenewlibnano" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option defaultValue="true" id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.shared.548869459" name="Shared (-shared)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.shared" useByScannerDiscovery="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.scriptfile.1818777301" name="Script files (-T)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.scriptfile" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.libs.1135656995" name="Libraries (-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.libs" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.paths.36884122" name="Library search path (-L)" 
superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.paths" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.other.396049466" name="Other linker flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.other" useByScannerDiscovery="false" value="c_link_misc_flag" valueType="string"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.cref.1645737861" name="Cross reference (-Xlinker --cref)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.cref" useByScannerDiscovery="false" value="true" valueType="boolean"/> <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.input.334732222" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.input"> <additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/> <additionalInput kind="additionalinput" paths="$(LIBS)"/> </inputType> </tool> <tool commandLinePattern="${COMMAND} ${FLAGS} ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.1601059928" name="GNU ARM Cross C++ Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections.437759352" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile.1101974459" name="Script files (-T)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.cref.2007675975" name="Cross reference (-Xlinker --cref)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.cref" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnano.2105838438" name="Use newlib-nano (--specs=nano.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnano" useByScannerDiscovery="false" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs.934137837" name="Libraries (-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostart.2118356996" name="Do not use standard start files (-nostartfiles)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostart" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nodeflibs.1427884346" name="Do not use default libraries (-nodefaultlibs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nodeflibs" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostdlibs.1433863653" name="No startup or default libs (-nostdlib)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostdlibs" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printgcsections.1387745410" name="Print removed sections (-Xlinker --print-gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printgcsections" 
useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.strip.1230158061" name="Omit all symbol information (-s)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.strip" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printmap.1307581821" name="Print link map (-Xlinker --print-map)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printmap" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.useprintffloat.960778920" name="Use float with nano printf (-u _printf_float)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.useprintffloat" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usescanffloat.637205035" name="Use float with nano scanf (-u _scanf_float)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usescanffloat" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnosys.1948314201" name="Do not use syscalls (--specs=nosys.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnosys" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.verbose.273162112" name="Verbose (-v)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.verbose" useByScannerDiscovery="false" value="false" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths.1399535143" name="Library search path (-L)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths" useByScannerDiscovery="false"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other.882307902" name="Other linker flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other" useByScannerDiscovery="false" value="cpp_link_misc_flag" valueType="string"/> <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input.262373798" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input"> <additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/> <additionalInput kind="additionalinput" paths="$(LIBS)"/> </inputType> </tool> <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver.506412204" name="GNU ARM Cross Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver"/> <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash.1461589245" name="GNU ARM Cross Create Flash Image" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.1937707052" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/> </tool> <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting.82359725" name="GNU ARM Cross Create Listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source.601724476" name="Display source (--source|-S)" 
superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders.692505279" name="Display all headers (--all-headers|-x)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle.97345172" name="Demangle names (--demangle|-C)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers.1342893377" name="Display line numbers (--line-numbers|-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers" value="true" valueType="boolean"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide.1533725981" name="Wide lines (--wide|-w)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide" value="true" valueType="boolean"/> </tool> <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize.1073550295" name="GNU ARM Cross Print Size" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format.946451386" name="Size format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format" useByScannerDiscovery="false"/> </tool> <tool commandLinePattern="${COMMAND} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.1302177015" name="GNU ARM Cross C++ Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler"> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs.704468062" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs" useByScannerDiscovery="true"/> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths.302877723" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths" useByScannerDiscovery="true"/> <option IS_BUILTIN_EMPTY="false" IS_VALUE_EMPTY="false" id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.files.343249373" name="Include files (-include)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.files" useByScannerDiscovery="true" valueType="includeFiles"> <listOptionValue builtIn="false" value="&quot;${workspace_loc:/${ProjName}/rtconfig_preinc.h}&quot;"/> </option> <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.other.465079095" name="Other compiler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.other" useByScannerDiscovery="true" value="cpp_misc_flag" valueType="string"/> <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.45918001" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input"/> </tool> </toolChain> </folderInfo> <sourceEntries> <entry excluding="|" flags="VALUE_WORKSPACE_PATH|RESOLVED" kind="sourcePath" name=""/> </sourceEntries> </configuration> </storageModule> <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/> </cconfiguration> </storageModule> <storageModule moduleId="cdtBuildSystem" version="4.0.0"> <project id="qemu-vexpress-a9.ilg.gnuarmeclipse.managedbuild.cross.target.elf.860020518" name="Executable" 
projectType="ilg.gnuarmeclipse.managedbuild.cross.target.elf"/> </storageModule> <storageModule moduleId="scannerConfiguration"> <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/> <scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094;ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094.;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1570350559;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.992053063"> <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/> </scannerConfigBuildInfo> </storageModule> <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/> <storageModule moduleId="refreshScope" versionNumber="2"> <configuration configurationName="Debug"> <resource resourceType="PROJECT" workspacePath="/f429_tmp"/> </configuration> </storageModule> <storageModule moduleId="org.eclipse.cdt.make.core.buildtargets"/> <storageModule moduleId="org.eclipse.cdt.internal.ui.text.commentOwnerProjectMappings"> <doc-comment-owner id="org.eclipse.cdt.ui.doxygen"> <path value=""/> </doc-comment-owner> </storageModule> </cproject>""" project_temp = """<?xml version="1.0" encoding="UTF-8"?> <projectDescription> <name>__project_name_flag__</name> <comment></comment> <projects> </projects> <buildSpec> <buildCommand> <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name> <triggers>clean,full,incremental,</triggers> <arguments> </arguments> </buildCommand> <buildCommand> <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name> <triggers>full,incremental,</triggers> <arguments> </arguments> </buildCommand> </buildSpec> <natures> <nature>org.eclipse.cdt.core.cnature</nature> <nature>org.rt-thread.studio.rttnature</nature> <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature> <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature> </natures> </projectDescription>""" projcfg_ini_temp = """#RT-Thread Studio Project Configuration #Sat Jan 16 15:18:32 CST 2021 project_type=rtt chip_name=${chip_name} cpu_name=None target_freq= clock_source= dvendor_name= rx_pin_name= rtt_path= source_freq= csp_path= sub_series_name= selected_rtt_version=latest cfg_version=v3.0 tool_chain=gcc uart_name= tx_pin_name= rtt_nano_path= output_project_path= hardware_adapter=J-Link project_name=${project_name}""" eclipse_core_runtime_temp = """content-types/enabled=true content-types/org.eclipse.cdt.core.asmSource/file-extensions=s eclipse.preferences.version=1""" makefile_targets_temp = """clean2: \t-$(RM) $(CC_DEPS)$(C++_DEPS)$(C_UPPER_DEPS)$(CXX_DEPS)$(SECONDARY_FLASH)$(SECONDARY_SIZE)$(ASM_DEPS)$(S_UPPER_DEPS)$(C_DEPS)$(CPP_DEPS) \t-$(RM) $(OBJS) *.elf \t-@echo ' ' *.elf: $(wildcard ../linkscripts/*/*.lds) $(wildcard ../linkscripts/*/*/*.lds)""" def get_mcu_info(uvproj_file_path): if os.path.exists(uvproj_file_path): with open(uvproj_file_path, mode='r') as f: data = f.read() result = re.search("<Device>(.*)</Device>", data) if result: return result.group(1) else: return "unknown" else: return "unknown" def gen_makefile_targets(output_file_path): try: w_str = makefile_targets_temp dir_name = os.path.dirname(output_file_path) if not os.path.exists(dir_name): os.makedirs(dir_name) with open(output_file_path, 'w') as f: f.write(w_str) return True except Exception as e: print(e) return False def gen_org_eclipse_core_runtime_prefs(output_file_path): try: w_str = eclipse_core_runtime_temp dir_name = os.path.dirname(output_file_path) if 
not os.path.exists(dir_name): os.makedirs(dir_name) with open(output_file_path, 'w') as f: f.write(w_str) return True except Exception as e: print(e) return False def gen_cproject_file(output_file_path): template_file_path = os.path.join(os.path.dirname(output_file_path), "template.cproject") if os.path.exists(template_file_path): try: shutil.copy(template_file_path, output_file_path) except Exception as e: print(e) return True else: CFLAGS = rtconfig.CFLAGS AFLAGS = rtconfig.AFLAGS LFLAGS = rtconfig.LFLAGS if 'CXXFLAGS' in dir(rtconfig): CXXFLAGS = rtconfig.CXXFLAGS else: CXXFLAGS = "" if "-T" in LFLAGS: items = str(LFLAGS).split() t_index = items.index("-T") items[t_index] = "" items[t_index + 1] = "" LFLAGS = " ".join(items) try: w_str = cproject_temp if "a_misc_flag" in w_str: w_str = w_str.replace("a_misc_flag", AFLAGS) if "c_misc_flag" in w_str: w_str = w_str.replace("c_misc_flag", CFLAGS) if "cpp_misc_flag" in w_str: w_str = w_str.replace("cpp_misc_flag", CXXFLAGS) if "c_link_misc_flag" in w_str: w_str = w_str.replace("c_link_misc_flag", LFLAGS) if "cpp_link_misc_flag" in w_str: w_str = w_str.replace("cpp_link_misc_flag", LFLAGS) dir_name = os.path.dirname(output_file_path) if not os.path.exists(dir_name): os.makedirs(dir_name) with open(output_file_path, 'w') as f: f.write(w_str) return True except Exception as e: return False def gen_project_file(output_file_path): try: w_str = project_temp dir_name = os.path.dirname(output_file_path) if not os.path.exists(dir_name): os.makedirs(dir_name) with open(output_file_path, 'w') as f: f.write(w_str) return True except Exception as e: return False def gen_projcfg_ini_file(chip_name, project_name, output_file_path): try: projcfg_file_tmp = Template(projcfg_ini_temp) w_str = projcfg_file_tmp.substitute(project_name=project_name, chip_name=(chip_name)) dir_name = os.path.dirname(output_file_path) if not os.path.exists(dir_name): os.makedirs(dir_name) with open(output_file_path, 'w') as f: f.write(w_str) return True except Exception as e: return False
apache-2.0
rosudrag/Freemium-winner
VirtualEnvironment/Lib/site-packages/pip-7.1.0-py3.4.egg/pip/locations.py
59
6362
"""Locations where we look for configs, install stuff, etc""" from __future__ import absolute_import import getpass import os import os.path import site import sys from distutils import sysconfig from distutils.command.install import install, SCHEME_KEYS # noqa from pip.compat import WINDOWS from pip.utils import appdirs # CA Bundle Locations CA_BUNDLE_PATHS = [ # Debian/Ubuntu/Gentoo etc. "/etc/ssl/certs/ca-certificates.crt", # Fedora/RHEL "/etc/pki/tls/certs/ca-bundle.crt", # OpenSUSE "/etc/ssl/ca-bundle.pem", # OpenBSD "/etc/ssl/cert.pem", # FreeBSD/DragonFly "/usr/local/share/certs/ca-root-nss.crt", # Homebrew on OSX "/usr/local/etc/openssl/cert.pem", ] # Attempt to locate a CA Bundle that we can pass into requests, we have a list # of possible ones from various systems. If we cannot find one then we'll set # this to None so that we default to whatever requests is setup to handle. # # Note to Downstream: If you wish to disable this autodetection and simply use # whatever requests does (likely you've already patched # requests.certs.where()) then simply edit this line so # that it reads ``CA_BUNDLE_PATH = None``. CA_BUNDLE_PATH = next((x for x in CA_BUNDLE_PATHS if os.path.exists(x)), None) # Application Directories USER_CACHE_DIR = appdirs.user_cache_dir("pip") DELETE_MARKER_MESSAGE = '''\ This file is placed here by pip to indicate the source was put here by pip. Once this package is successfully installed this source code will be deleted (unless you remove this file). ''' PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' def write_delete_marker_file(directory): """ Write the pip delete marker file into this directory. """ filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) with open(filepath, 'w') as marker_fp: marker_fp.write(DELETE_MARKER_MESSAGE) def running_under_virtualenv(): """ Return True if we're running inside a virtualenv, False otherwise. """ if hasattr(sys, 'real_prefix'): return True elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): return True return False def virtualenv_no_global(): """ Return True if in a venv and no system site packages. """ # this mirrors the logic in virtualenv.py for locating the # no-global-site-packages.txt file site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') if running_under_virtualenv() and os.path.isfile(no_global_file): return True def __get_username(): """ Returns the effective username of the current process. """ if WINDOWS: return getpass.getuser() import pwd return pwd.getpwuid(os.geteuid()).pw_name if running_under_virtualenv(): src_prefix = os.path.join(sys.prefix, 'src') else: # FIXME: keep src in cwd for now (it is not a temporary folder) try: src_prefix = os.path.join(os.getcwd(), 'src') except OSError: # In case the current working directory has been renamed or deleted sys.exit( "The folder you are executing pip from can no longer be found." ) # under Mac OS X + virtualenv sys.prefix is not properly resolved # it is something like /path/to/python/bin/.. # Note: using realpath due to tmp dirs on OSX being symlinks src_prefix = os.path.abspath(src_prefix) # FIXME doesn't account for venv linked to global site-packages site_packages = sysconfig.get_python_lib() user_site = site.USER_SITE user_dir = os.path.expanduser('~') if WINDOWS: bin_py = os.path.join(sys.prefix, 'Scripts') bin_user = os.path.join(user_site, 'Scripts') # buildout uses 'bin' on Windows too? 
if not os.path.exists(bin_py): bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') config_basename = 'pip.ini' legacy_storage_dir = os.path.join(user_dir, 'pip') legacy_config_file = os.path.join( legacy_storage_dir, config_basename, ) else: bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') config_basename = 'pip.conf' legacy_storage_dir = os.path.join(user_dir, '.pip') legacy_config_file = os.path.join( legacy_storage_dir, config_basename, ) # Forcing to use /usr/local/bin for standard Mac OS X framework installs # Also log to ~/Library/Logs/ for use with the Console.app log viewer if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': bin_py = '/usr/local/bin' site_config_files = [ os.path.join(path, config_basename) for path in appdirs.site_config_dirs('pip') ] def distutils_scheme(dist_name, user=False, home=None, root=None, isolated=False): """ Return a distutils install scheme """ from distutils.dist import Distribution scheme = {} if isolated: extra_dist_args = {"script_args": ["--no-user-cfg"]} else: extra_dist_args = {} dist_args = {'name': dist_name} dist_args.update(extra_dist_args) d = Distribution(dist_args) d.parse_config_files() i = d.get_command_obj('install', create=True) # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options() # ideally, we'd prefer a scheme class that has no side-effects. i.user = user or i.user if user: i.prefix = "" i.home = home or i.home i.root = root or i.root i.finalize_options() for key in SCHEME_KEYS: scheme[key] = getattr(i, 'install_' + key) if i.install_lib is not None: # install_lib takes precedence over purelib and platlib scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) if running_under_virtualenv(): scheme['headers'] = os.path.join( sys.prefix, 'include', 'site', 'python' + sys.version[:3], dist_name, ) if root is not None: scheme["headers"] = os.path.join( root, os.path.abspath(scheme["headers"])[1:], ) return scheme
mit
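For orientation, a sketch of how the distutils_scheme() helper above might be exercised; the distribution name is hypothetical and the printed paths depend on the interpreter and platform:

# Illustrative only: show the install locations pip would compute.
from pip.locations import distutils_scheme, src_prefix

scheme = distutils_scheme('example-dist')
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
    print(key, '->', scheme[key])
print('editable checkouts are placed under', src_prefix)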
dfranco/shinken
test/test_db_mysql.py
19
1856
#!/usr/bin/env python
# Copyright (C) 2009-2014:
#     Gabes Jean, [email protected]
#     Gerhard Lausser, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#

from shinken_test import *

try:
    from shinken.db_mysql import DBMysql
except ImportError:
    # Oups this server do not have mysql installed, skip this test
    DBMysql = None


class TestConfig(ShinkenTest):
    # setUp is inherited from ShinkenTest

    def create_db(self):
        self.db = DBMysql(host='localhost', user='root', password='root',
                          database='merlin', character_set='utf8')

    def test_connect_database(self):
        if not DBMysql:
            return
        self.create_db()
        try:
            self.db.connect_database()
        except Exception:
            # arg, no database here? sic!
            pass

    def test_execute_query(self):
        if not DBMysql:
            return
        self.create_db()
        try:
            self.db.connect_database()
            q = "DELETE FROM service WHERE instance_id = '0'"
            self.db.execute_query(q)
        except Exception:
            pass


if __name__ == '__main__':
    unittest.main()
agpl-3.0
lmEshoo/st2contrib
packs/dripstat/sensors/dripstat_alert_sensor.py
12
2855
import eventlet
import requests

from datetime import datetime

from st2reactor.sensor.base import PollingSensor

__all__ = [
    'DripstatAlertSensor'
]

BASE_URL = 'https://api.dripstat.com/api/v1'

eventlet.monkey_patch(
    os=True,
    select=True,
    socket=True,
    thread=True,
    time=True)


class DripstatAlertSensor(PollingSensor):
    def __init__(self, sensor_service, config=None, poll_interval=30):
        super(DripstatAlertSensor, self).__init__(sensor_service=sensor_service,
                                                  config=config,
                                                  poll_interval=poll_interval)
        self._trigger_ref = 'dripstat.alert'
        self._log = self._sensor_service.get_logger(__name__)

    def setup(self):
        self._api_key = self._config['api_key']
        self._applications = self._api_request(endpoint='/apps')

    def poll(self):
        for application in self._applications:
            params = {'appId': application['id']}
            alerts = self._api_request(endpoint='/activeAlerts', params=params)
            for alert in alerts:
                last_alert_timestamp = self._get_last_alert_timestamp(application['name'])
                epoch = int(alert['startedAt']) / 1000
                if epoch > last_alert_timestamp:
                    self._set_last_alert_timestamp(application['name'], epoch)
                    self._dispatch_trigger_for_alert(application=application['name'],
                                                     alert=alert,
                                                     epoch=epoch)

    def cleanup(self):
        pass

    def add_trigger(self, trigger):
        pass

    def update_trigger(self, trigger):
        pass

    def remove_trigger(self, trigger):
        pass

    def _api_request(self, endpoint, params={}):
        url = BASE_URL + endpoint
        default_params = {'clientId': self._api_key}
        params.update(default_params)

        response = requests.get(url, params=params)
        return response.json()

    def _dispatch_trigger_for_alert(self, application, alert, epoch):
        trigger = self._trigger_ref
        payload = {
            'app_name': application,
            'alert_type': alert['name'],
            'started_at': epoch,
            'started_at_iso8601': datetime.fromtimestamp(epoch).isoformat(),
            'jvm_host': alert['jvmHost']
        }
        self._sensor_service.dispatch(trigger=trigger, payload=payload)

    def _get_last_alert_timestamp(self, app):
        last_alert_timestamp = self._sensor_service.get_value("%s.last_alert_timestamp" % app)

        if last_alert_timestamp:
            return int(last_alert_timestamp)
        else:
            return 0

    def _set_last_alert_timestamp(self, app, timestamp):
        self._sensor_service.set_value(name='%s.last_alert_timestamp' % app,
                                       value=str(timestamp))
apache-2.0
agaveapi/SC17-container-tutorial
content/images/jupyter/examples/setvars.py
1
2421
# Here we define some utility commands to simplify interaction with the shell.
# You don't need to read or understand this, but it's here in case you want to.
import re
import os

def repvar(v):
    """
    repvar() is short for "Replace Variables." The idea is that this
    function looks for strings of the form $VAR or ${VAR} or even $(CMD)
    in the input string and replaces them, either with the contents of
    os.environ[VAR] or os.pipe(CMD), mimicking the behavior of bash.
    If a backslash precedes the $, then the backslash will be removed
    but the string will not be evaluated. Thus:
    ${HOME} becomes "/home/user"
    $HOME becomes "/home/user"
    $(echo Hello) becomes "Hello"
    \$HOME becomes $HOME
    """
    epos = 0
    buf = ''
    for g in re.finditer(r'\$((\w+)|\{([^}]*)\}|\(([^())]*)\))|(\\+\$)', v):
        if g:
            i = 2
            while g.group(i) == None:
                i += 1
            p = g.start(0)
            buf += v[epos:p]
            epos = p + len(g.group(0))
            if i == 4:
                fh = os.popen(g.group(i), "r")
                c = repvar(fh.read())
                fh.close()
            elif i == 5:
                c = '$'
            else:
                if not g.group(i) in os.environ:
                    raise Exception("no such environment variable: " + g.group(i))
                c = repvar(os.environ[g.group(i)])
            buf += c
        else:
            break
    buf += v[epos:]
    return buf.strip()

def setvar(e):
    """
    setvar() emulates the ability of BASH to set environment variables.
    Thus, NAME=VALUE will set os.environ["NAME"]="VALUE". Bash-style
    comments will be stripped, and bash-line continuations will be
    processed.
    """
    e = re.sub(r'#[^\r\n]*', '', e)
    e = re.sub(r'\\\n\s*', '', e)
    for m in re.finditer(r'(?m)(\w+)=(.*)', e):
        k = m.group(1)
        v = repvar(m.group(2))
        print(k + "=" + v)
        os.environ[k] = v

def readfile(f):
    """
    Reads in a file. repvar() will be applied to the file name.
    """
    n = repvar(f)
    print("Reading file `" + n + "'")
    fh = open(n)
    c = fh.read()
    fh.close()
    return c

def writefile(f, c):
    """
    Writes out a file. repvar() will be applied both to the file name
    and the file contents.
    """
    n = repvar(f)
    print("Writing file `" + n + "'")
    fh = open(n, "w")
    fh.write(repvar(c))
    fh.close()
bsd-3-clause
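A short usage sketch for the helpers above, assuming the file is importable as setvars and using made-up variable names:

import os
from setvars import setvar, repvar

os.environ["EXAMPLE_USER"] = "jdoe"   # hypothetical variable
setvar("""
WORK_DIR=${HOME}/agave-work    # comments and ${VAR} are expanded as in bash
GREETING=$(echo Hello)
""")
print(repvar("user=$EXAMPLE_USER dir=${WORK_DIR}"))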
wklken/flask
flask/testsuite/views.py
561
5068
# -*- coding: utf-8 -*-
"""
    flask.testsuite.views
    ~~~~~~~~~~~~~~~~~~~~~

    Pluggable views.

    :copyright: (c) 2011 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

import flask
import flask.views
import unittest
from flask.testsuite import FlaskTestCase
from werkzeug.http import parse_set_header


class ViewTestCase(FlaskTestCase):

    def common_test(self, app):
        c = app.test_client()

        self.assert_equal(c.get('/').data, b'GET')
        self.assert_equal(c.post('/').data, b'POST')
        self.assert_equal(c.put('/').status_code, 405)
        meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
        self.assert_equal(sorted(meths), ['GET', 'HEAD', 'OPTIONS', 'POST'])

    def test_basic_view(self):
        app = flask.Flask(__name__)

        class Index(flask.views.View):
            methods = ['GET', 'POST']

            def dispatch_request(self):
                return flask.request.method

        app.add_url_rule('/', view_func=Index.as_view('index'))
        self.common_test(app)

    def test_method_based_view(self):
        app = flask.Flask(__name__)

        class Index(flask.views.MethodView):

            def get(self):
                return 'GET'

            def post(self):
                return 'POST'

        app.add_url_rule('/', view_func=Index.as_view('index'))
        self.common_test(app)

    def test_view_patching(self):
        app = flask.Flask(__name__)

        class Index(flask.views.MethodView):

            def get(self):
                1 // 0

            def post(self):
                1 // 0

        class Other(Index):

            def get(self):
                return 'GET'

            def post(self):
                return 'POST'

        view = Index.as_view('index')
        view.view_class = Other
        app.add_url_rule('/', view_func=view)
        self.common_test(app)

    def test_view_inheritance(self):
        app = flask.Flask(__name__)

        class Index(flask.views.MethodView):

            def get(self):
                return 'GET'

            def post(self):
                return 'POST'

        class BetterIndex(Index):

            def delete(self):
                return 'DELETE'

        app.add_url_rule('/', view_func=BetterIndex.as_view('index'))
        c = app.test_client()

        meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
        self.assert_equal(sorted(meths),
                          ['DELETE', 'GET', 'HEAD', 'OPTIONS', 'POST'])

    def test_view_decorators(self):
        app = flask.Flask(__name__)

        def add_x_parachute(f):
            def new_function(*args, **kwargs):
                resp = flask.make_response(f(*args, **kwargs))
                resp.headers['X-Parachute'] = 'awesome'
                return resp
            return new_function

        class Index(flask.views.View):
            decorators = [add_x_parachute]

            def dispatch_request(self):
                return 'Awesome'

        app.add_url_rule('/', view_func=Index.as_view('index'))
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.headers['X-Parachute'], 'awesome')
        self.assert_equal(rv.data, b'Awesome')

    def test_implicit_head(self):
        app = flask.Flask(__name__)

        class Index(flask.views.MethodView):

            def get(self):
                return flask.Response('Blub', headers={
                    'X-Method': flask.request.method
                })

        app.add_url_rule('/', view_func=Index.as_view('index'))
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.data, b'Blub')
        self.assert_equal(rv.headers['X-Method'], 'GET')
        rv = c.head('/')
        self.assert_equal(rv.data, b'')
        self.assert_equal(rv.headers['X-Method'], 'HEAD')

    def test_explicit_head(self):
        app = flask.Flask(__name__)

        class Index(flask.views.MethodView):

            def get(self):
                return 'GET'

            def head(self):
                return flask.Response('', headers={'X-Method': 'HEAD'})

        app.add_url_rule('/', view_func=Index.as_view('index'))
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.data, b'GET')
        rv = c.head('/')
        self.assert_equal(rv.data, b'')
        self.assert_equal(rv.headers['X-Method'], 'HEAD')

    def test_endpoint_override(self):
        app = flask.Flask(__name__)
        app.debug = True

        class Index(flask.views.View):
            methods = ['GET', 'POST']

            def dispatch_request(self):
                return flask.request.method

        app.add_url_rule('/', view_func=Index.as_view('index'))

        with self.assert_raises(AssertionError):
            app.add_url_rule('/', view_func=Index.as_view('index'))

        # But these tests should still pass. We just log a warning.
        self.common_test(app)


def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ViewTestCase))
    return suite
bsd-3-clause
EmmanuelJohnson/ssquiz
flask/lib/python2.7/site-packages/openid/__init__.py
139
1623
""" This package is an implementation of the OpenID specification in Python. It contains code for both server and consumer implementations. For information on implementing an OpenID consumer, see the C{L{openid.consumer.consumer}} module. For information on implementing an OpenID server, see the C{L{openid.server.server}} module. @contact: U{http://openid.net/developers/dev-mailing-lists/ <http://openid.net/developers/dev-mailing-lists/} @copyright: (C) 2005-2008 JanRain, Inc. @license: Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at U{http://www.apache.org/licenses/LICENSE-2.0} Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ __version__ = '[library version:2.2.1]'[17:-1] __all__ = [ 'association', 'consumer', 'cryptutil', 'dh', 'extension', 'extensions', 'fetchers', 'kvform', 'message', 'oidutil', 'server', 'sreg', 'store', 'urinorm', 'yadis', ] # Parse the version info try: version_info = map(int, __version__.split('.')) except ValueError: version_info = (None, None, None) else: if len(version_info) != 3: version_info = (None, None, None) else: version_info = tuple(version_info)
bsd-3-clause
jaredweiss/nupic
tests/integration/nupic/algorithms/temporal_memory_performance_test.py
9
4753
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

import time
import unittest

import numpy

from nupic.data.generators.pattern_machine import PatternMachine
from nupic.data.generators.sequence_machine import SequenceMachine
from nupic.research.temporal_memory import TemporalMemory
from nupic.research.TP import TP
from nupic.research.TP10X2 import TP10X2


# ==============================
# Tests
# ==============================

class TemporalMemoryPerformanceTest(unittest.TestCase):

  def setUp(self):
    self.tm = TemporalMemory(columnDimensions=[2048],
                             cellsPerColumn=32,
                             initialPermanence=0.5,
                             connectedPermanence=0.8,
                             minThreshold=10,
                             maxNewSynapseCount=12,
                             permanenceIncrement=0.1,
                             permanenceDecrement=0.05,
                             activationThreshold=15)

    self.tp = TP(numberOfCols=2048,
                 cellsPerColumn=32,
                 initialPerm=0.5,
                 connectedPerm=0.8,
                 minThreshold=10,
                 newSynapseCount=12,
                 permanenceInc=0.1,
                 permanenceDec=0.05,
                 activationThreshold=15,
                 globalDecay=0,
                 burnIn=1,
                 checkSynapseConsistency=False,
                 pamLength=1)

    self.tp10x2 = TP10X2(numberOfCols=2048,
                         cellsPerColumn=32,
                         initialPerm=0.5,
                         connectedPerm=0.8,
                         minThreshold=10,
                         newSynapseCount=12,
                         permanenceInc=0.1,
                         permanenceDec=0.05,
                         activationThreshold=15,
                         globalDecay=0,
                         burnIn=1,
                         checkSynapseConsistency=False,
                         pamLength=1)

    self.patternMachine = PatternMachine(2048, 40, num=100)
    self.sequenceMachine = SequenceMachine(self.patternMachine)

  def testSingleSequence(self):
    print "Test: Single sequence"
    sequence = self.sequenceMachine.generateFromNumbers(range(50))
    times = self._feedAll(sequence)

    self.assertTrue(times[0] < times[1])
    self.assertTrue(times[2] < times[1])
    self.assertTrue(times[2] < times[0])

  # ==============================
  # Helper functions
  # ==============================

  def _feedAll(self, sequence, learn=True, num=1):
    repeatedSequence = sequence * num
    times = []

    def tmComputeFn(pattern, instance):
      instance.compute(pattern, learn)

    def tpComputeFn(pattern, instance):
      array = self._patternToNumpyArray(pattern)
      instance.compute(array, enableLearn=learn, computeInfOutput=True)

    elapsed = self._feedOne(repeatedSequence, self.tm, tmComputeFn)
    times.append(elapsed)
    print "TM:\t{0}s".format(elapsed)

    elapsed = self._feedOne(repeatedSequence, self.tp, tpComputeFn)
    times.append(elapsed)
    print "TP:\t{0}s".format(elapsed)

    elapsed = self._feedOne(repeatedSequence, self.tp10x2, tpComputeFn)
    times.append(elapsed)
    print "TP10X2:\t{0}s".format(elapsed)

    return times

  @staticmethod
  def _feedOne(sequence, instance, computeFn):
    start = time.clock()

    for pattern in sequence:
      if pattern == None:
        instance.reset()
      else:
        computeFn(pattern, instance)

    elapsed = time.clock() - start

    return elapsed

  @staticmethod
  def _patternToNumpyArray(pattern):
    array = numpy.zeros(2048, dtype='int32')
    array[list(pattern)] = 1

    return array


# ==============================
# Main
# ==============================

if __name__ == "__main__":
  unittest.main()
gpl-3.0
atomicjets/twitter-for-bigquery
libs/requests/packages/charade/__init__.py
122
1327
######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

__version__ = "1.0.3"
from sys import version_info


def detect(aBuf):
    if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
            (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
        raise ValueError('Expected a bytes object, not a unicode object')

    from . import universaldetector
    u = universaldetector.UniversalDetector()
    u.reset()
    u.feed(aBuf)
    u.close()
    return u.result
apache-2.0
ltilve/chromium
tools/telemetry/telemetry/core/browser_finder.py
3
5942
# Copyright 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Finds browsers that can be controlled by telemetry.""" import logging import operator from telemetry.core.backends.chrome import android_browser_finder from telemetry.core.backends.chrome import cros_browser_finder from telemetry.core.backends.chrome import desktop_browser_finder from telemetry.core.backends.chrome import ios_browser_finder from telemetry.core.backends.remote import trybot_browser_finder from telemetry.core.backends.webdriver import webdriver_desktop_browser_finder from telemetry.core import browser_finder_exceptions from telemetry.core import device_finder from telemetry import decorators BROWSER_FINDERS = [ desktop_browser_finder, android_browser_finder, cros_browser_finder, ios_browser_finder, trybot_browser_finder, webdriver_desktop_browser_finder, ] def FindAllBrowserTypes(options): return reduce(operator.add, [bf.FindAllBrowserTypes(options) for bf in BROWSER_FINDERS]) @decorators.Cache def FindBrowser(options): """Finds the best PossibleBrowser object given a BrowserOptions object. Args: A BrowserOptions object. Returns: A PossibleBrowser object. Raises: BrowserFinderException: Options improperly set, or an error occurred. """ if options.browser_type == 'exact' and options.browser_executable == None: raise browser_finder_exceptions.BrowserFinderException( '--browser=exact requires --browser-executable to be set.') if options.browser_type != 'exact' and options.browser_executable != None: raise browser_finder_exceptions.BrowserFinderException( '--browser-executable requires --browser=exact.') if options.browser_type == 'cros-chrome' and options.cros_remote == None: raise browser_finder_exceptions.BrowserFinderException( 'browser_type=cros-chrome requires cros_remote be set.') if (options.browser_type != 'cros-chrome' and options.browser_type != 'cros-chrome-guest' and options.cros_remote != None): raise browser_finder_exceptions.BrowserFinderException( '--remote requires --browser=cros-chrome or cros-chrome-guest.') devices = device_finder.GetDevicesMatchingOptions(options) browsers = [] default_browsers = [] for device in devices: for finder in BROWSER_FINDERS: if(options.browser_type and options.browser_type != 'any' and options.browser_type not in finder.FindAllBrowserTypes(options)): continue curr_browsers = finder.FindAllAvailableBrowsers(options, device) new_default_browser = finder.SelectDefaultBrowser(curr_browsers) if new_default_browser: default_browsers.append(new_default_browser) browsers.extend(curr_browsers) if options.browser_type == None: if default_browsers: default_browser = sorted(default_browsers, key=lambda b: b.last_modification_time())[-1] logging.warning('--browser omitted. Using most recent local build: %s' % default_browser.browser_type) default_browser.UpdateExecutableIfNeeded() return default_browser if len(browsers) == 1: logging.warning('--browser omitted. Using only available browser: %s' % browsers[0].browser_type) browsers[0].UpdateExecutableIfNeeded() return browsers[0] raise browser_finder_exceptions.BrowserTypeRequiredException( '--browser must be specified. 
Available browsers:\n%s' % '\n'.join(sorted(set([b.browser_type for b in browsers])))) if options.browser_type == 'any': types = FindAllBrowserTypes(options) def CompareBrowsersOnTypePriority(x, y): x_idx = types.index(x.browser_type) y_idx = types.index(y.browser_type) return x_idx - y_idx browsers.sort(CompareBrowsersOnTypePriority) if len(browsers) >= 1: browsers[0].UpdateExecutableIfNeeded() return browsers[0] else: return None matching_browsers = [b for b in browsers if b.browser_type == options.browser_type and b.SupportsOptions(options)] chosen_browser = None if len(matching_browsers) == 1: chosen_browser = matching_browsers[0] elif len(matching_browsers) > 1: logging.warning('Multiple browsers of the same type found: %s' % ( repr(matching_browsers))) chosen_browser = sorted(matching_browsers, key=lambda b: b.last_modification_time())[-1] if chosen_browser: logging.info('Chose browser: %s' % (repr(chosen_browser))) chosen_browser.UpdateExecutableIfNeeded() return chosen_browser @decorators.Cache def GetAllAvailableBrowsers(options, device): """Returns a list of available browsers on the device. Args: options: A BrowserOptions object. device: The target device, which can be None. Returns: A list of browser instances. Raises: BrowserFinderException: Options are improperly set, or an error occurred. """ if not device: return [] possible_browsers = [] for browser_finder in BROWSER_FINDERS: possible_browsers.extend( browser_finder.FindAllAvailableBrowsers(options, device)) return possible_browsers @decorators.Cache def GetAllAvailableBrowserTypes(options): """Returns a list of available browser types. Args: options: A BrowserOptions object. Returns: A list of browser type strings. Raises: BrowserFinderException: Options are improperly set, or an error occurred. """ devices = device_finder.GetDevicesMatchingOptions(options) possible_browsers = [] for device in devices: possible_browsers.extend(GetAllAvailableBrowsers(options, device)) type_list = set([browser.browser_type for browser in possible_browsers]) type_list = list(type_list) type_list.sort() return type_list
bsd-3-clause
GitHublong/hue
desktop/core/ext-py/Django-1.6.10/django/utils/importlib.py
105
1384
# Taken from Python 2.7 with permission from/by the original author.
import sys

from django.utils import six


def _resolve_name(name, package, level):
    """Return the absolute name of the module to be imported."""
    if not hasattr(package, 'rindex'):
        raise ValueError("'package' not set to a string")
    dot = len(package)
    for x in range(level, 1, -1):
        try:
            dot = package.rindex('.', 0, dot)
        except ValueError:
            raise ValueError("attempted relative import beyond top-level "
                             "package")
    return "%s.%s" % (package[:dot], name)


if six.PY3:
    from importlib import import_module
else:
    def import_module(name, package=None):
        """Import a module.

        The 'package' argument is required when performing a relative import.
        It specifies the package to use as the anchor point from which to
        resolve the relative import to an absolute import.

        """
        if name.startswith('.'):
            if not package:
                raise TypeError("relative imports require the 'package' argument")
            level = 0
            for character in name:
                if character != '.':
                    break
                level += 1
            name = _resolve_name(name[level:], package, level)
        __import__(name)
        return sys.modules[name]
apache-2.0
mrichie/node-mwp
node_modules/weibo/node_modules/emoji/bin/create_emoji_js.py
6
2090
#!/usr/bin/python
# -*- coding: utf-8 -*-

# install pyquery first: $ sudo easy_install pyquery

import os
from pyquery import PyQuery as pq

project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# d = pq(url='https://raw.github.com/iamcal/php-emoji/master/table.htm')
d = pq(open(project_dir + '/lib/table.htm', 'rb').read())
tr = d('table tr')

content = open(project_dir + '/tpl/emoji_header.js', 'rb').read().decode('utf-8')

for tre in tr[1:]:
    tds = pq(tre)('td')
    # val, name, Unified DoCoMo KDDI Softbank Google
    item = ['', '', '', '', '', '', '']
    for index, tde in enumerate(tds):
        td = pq(tde)
        # <td><span class="emoji emoji2320e3"></span></td>
        if index == 0:
            val = td('span').attr('class')[11:].decode('utf-8')
        else:
            val = td.text().decode('utf-8')
        source = val
        if index != 1 and val != '-':
            # convert to str
            val = val[2:]
            val = val.split(' U+')
            val[0] = (r'\U' + '0' * (8 - len(val[0])) + val[0].lower()).decode('unicode-escape')
            if len(val) > 1:
                val[1] = (r'\U' + '0' * (8 - len(val[1])) + val[1].lower()).decode('unicode-escape')
                val = val[0] + val[1]
            else:
                val = val[0]
        if index > 1:
            val = [val, source]
        item[index] = val
    # print item
    # unified: [unified_unicode, name, classname, docomo, kddi, softbank, google]
    content += u' "' + item[2][0] + '": ["' + item[2][1] + '", "' + item[1] + '", "' + item[0] + '", ["' \
        + item[3][0] + '", "' + item[3][1] + '"], ["' \
        + item[4][0] + '", "' + item[4][1] + '"], ["' \
        + item[5][0] + '", "' + item[5][1] + '"], ["' \
        + item[6][0] + '", "' + item[6][1] + '"]],\n'

content = content[:-2] + u'\n};\n\n'
content += open(project_dir + '/tpl/emoji_footer.js', 'rb').read().decode('utf-8')

f = open(project_dir + '/lib/emoji.js', 'wb')
f.write(content.encode('utf-8'))
f.close()
mit
CTSRD-SOAAP/chromium-42.0.2311.135
build/android/gyp/javac.py
2
8917
#!/usr/bin/env python # # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import fnmatch import optparse import os import shutil import re import sys import textwrap from util import build_utils from util import md5_check import jar sys.path.append(build_utils.COLORAMA_ROOT) import colorama def ColorJavacOutput(output): fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)' warning_re = re.compile( fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$') error_re = re.compile( fileline_prefix + r'(?P<full_message> (?P<message>.*))$') marker_re = re.compile(r'\s*(?P<marker>\^)\s*$') warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM] error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT] marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT] def Colorize(line, regex, color): match = regex.match(line) start = match.start(color[0]) end = match.end(color[0]) return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET + colorama.Style.RESET_ALL + line[end:]) def ApplyColor(line): if warning_re.match(line): line = Colorize(line, warning_re, warning_color) elif error_re.match(line): line = Colorize(line, error_re, error_color) elif marker_re.match(line): line = Colorize(line, marker_re, marker_color) return line return '\n'.join(map(ApplyColor, output.split('\n'))) def DoJavac( classpath, classes_dir, chromium_code, java_files): """Runs javac. Builds |java_files| with the provided |classpath| and puts the generated .class files into |classes_dir|. If |chromium_code| is true, extra lint checking will be enabled. """ jar_inputs = [] for path in classpath: if os.path.exists(path + '.TOC'): jar_inputs.append(path + '.TOC') else: jar_inputs.append(path) javac_args = [ '-g', # Chromium only allows UTF8 source files. Being explicit avoids # javac pulling a default encoding from the user's environment. '-encoding', 'UTF-8', '-source', '1.7', '-target', '1.7', '-classpath', ':'.join(classpath), '-d', classes_dir] if chromium_code: javac_args.extend(['-Xlint:unchecked', '-Xlint:deprecation']) else: # XDignore.symbol.file makes javac compile against rt.jar instead of # ct.sym. This means that using a java internal package/class will not # trigger a compile warning or error. javac_args.extend(['-XDignore.symbol.file']) javac_cmd = ['javac'] + javac_args + java_files def Compile(): build_utils.CheckOutput( javac_cmd, print_stdout=chromium_code, stderr_filter=ColorJavacOutput) record_path = os.path.join(classes_dir, 'javac.md5.stamp') md5_check.CallAndRecordIfStale( Compile, record_path=record_path, input_paths=java_files + jar_inputs, input_strings=javac_cmd) _MAX_MANIFEST_LINE_LEN = 72 def CreateManifest(manifest_path, classpath, main_class=None, manifest_entries=None): """Creates a manifest file with the given parameters. This generates a manifest file that compiles with the spec found at http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest Args: manifest_path: The path to the manifest file that should be created. classpath: The JAR files that should be listed on the manifest file's classpath. main_class: If present, the class containing the main() function. manifest_entries: If present, a list of (key, value) pairs to add to the manifest. 
""" output = ['Manifest-Version: 1.0'] if main_class: output.append('Main-Class: %s' % main_class) if manifest_entries: for k, v in manifest_entries: output.append('%s: %s' % (k, v)) if classpath: sanitized_paths = [] for path in classpath: sanitized_paths.append(os.path.basename(path.strip('"'))) output.append('Class-Path: %s' % ' '.join(sanitized_paths)) output.append('Created-By: ') output.append('') wrapper = textwrap.TextWrapper(break_long_words=True, drop_whitespace=False, subsequent_indent=' ', width=_MAX_MANIFEST_LINE_LEN - 2) output = '\r\n'.join(w for l in output for w in wrapper.wrap(l)) with open(manifest_path, 'w') as f: f.write(output) def main(argv): colorama.init() argv = build_utils.ExpandFileArgs(argv) parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option( '--src-gendirs', help='Directories containing generated java files.') parser.add_option( '--java-srcjars', action='append', default=[], help='List of srcjars to include in compilation.') parser.add_option( '--classpath', action='append', help='Classpath for javac. If this is specified multiple times, they ' 'will all be appended to construct the classpath.') parser.add_option( '--javac-includes', help='A list of file patterns. If provided, only java files that match' 'one of the patterns will be compiled.') parser.add_option( '--jar-excluded-classes', default='', help='List of .class file patterns to exclude from the jar.') parser.add_option( '--chromium-code', type='int', help='Whether code being compiled should be built with stricter ' 'warnings for chromium code.') parser.add_option( '--classes-dir', help='Directory for compiled .class files.') parser.add_option('--jar-path', help='Jar output path.') parser.add_option( '--main-class', help='The class containing the main method.') parser.add_option( '--manifest-entry', action='append', help='Key:value pairs to add to the .jar manifest.') parser.add_option('--stamp', help='Path to touch on success.') options, args = parser.parse_args(argv) if options.main_class and not options.jar_path: parser.error('--main-class requires --jar-path') classpath = [] for arg in options.classpath: classpath += build_utils.ParseGypList(arg) java_srcjars = [] for arg in options.java_srcjars: java_srcjars += build_utils.ParseGypList(arg) java_files = args if options.src_gendirs: src_gendirs = build_utils.ParseGypList(options.src_gendirs) java_files += build_utils.FindInDirectories(src_gendirs, '*.java') input_files = classpath + java_srcjars + java_files with build_utils.TempDir() as temp_dir: classes_dir = os.path.join(temp_dir, 'classes') os.makedirs(classes_dir) if java_srcjars: java_dir = os.path.join(temp_dir, 'java') os.makedirs(java_dir) for srcjar in java_srcjars: build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java') java_files += build_utils.FindInDirectory(java_dir, '*.java') if options.javac_includes: javac_includes = build_utils.ParseGypList(options.javac_includes) filtered_java_files = [] for f in java_files: for include in javac_includes: if fnmatch.fnmatch(f, include): filtered_java_files.append(f) break java_files = filtered_java_files DoJavac( classpath, classes_dir, options.chromium_code, java_files) if options.jar_path: if options.main_class or options.manifest_entry: if options.manifest_entry: entries = map(lambda e: e.split(":"), options.manifest_entry) else: entries = [] manifest_file = os.path.join(temp_dir, 'manifest') CreateManifest(manifest_file, classpath, options.main_class, entries) else: manifest_file = None 
jar.JarDirectory(classes_dir, build_utils.ParseGypList(options.jar_excluded_classes), options.jar_path, manifest_file=manifest_file) if options.classes_dir: # Delete the old classes directory. This ensures that all .class files in # the output are actually from the input .java files. For example, if a # .java file is deleted or an inner class is removed, the classes # directory should not contain the corresponding old .class file after # running this action. build_utils.DeleteDirectory(options.classes_dir) shutil.copytree(classes_dir, options.classes_dir) if options.depfile: build_utils.WriteDepfile( options.depfile, input_files + build_utils.GetPythonDependencies()) if options.stamp: build_utils.Touch(options.stamp) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
bsd-3-clause
h2oai/h2o
h2o-docs-theme/setup.py
84
1313
# -*- coding: utf-8 -*-
"""`sphinx_rtd_theme` lives on `Github`_.

.. _github: https://www.github.com/snide/sphinx_rtd_theme

"""
from setuptools import setup
from sphinx_rtd_theme import __version__


setup(
    name='sphinx_rtd_theme',
    version=__version__,
    url='https://github.com/snide/sphinx_rtd_theme/',
    license='MIT',
    author='Dave Snider',
    author_email='[email protected]',
    description='ReadTheDocs.org theme for Sphinx, 2013 version.',
    long_description=open('README.rst').read(),
    zip_safe=False,
    packages=['sphinx_rtd_theme'],
    package_data={'sphinx_rtd_theme': [
        'theme.conf',
        '*.html',
        'static/css/*.css',
        'static/js/*.js',
        'static/font/*.*'
    ]},
    include_package_data=True,
    install_requires=open('requirements.txt').read().splitlines(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: BSD License',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Operating System :: OS Independent',
        'Topic :: Documentation',
        'Topic :: Software Development :: Documentation',
    ],
)
apache-2.0
heromod/migrid
mig/shared/functionality/migadmin.py
1
14406
#!/usr/bin/python # -*- coding: utf-8 -*- # # --- BEGIN_HEADER --- # # migadmin - admin control panel with daemon status monitor # Copyright (C) 2003-2015 The MiG Project lead by Brian Vinter # # This file is part of MiG. # # MiG is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # MiG is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # -- END_HEADER --- # """MiG administrators page with daemon status and configuration""" import os import subprocess import shared.returnvalues as returnvalues from shared.certreq import build_certreqitem_object, list_cert_reqs, \ get_cert_req, delete_cert_req, accept_cert_req from shared.defaults import default_pager_entries from shared.fileio import send_message_to_grid_script from shared.findtype import is_admin from shared.functional import validate_input_and_cert from shared.html import html_post_helper, themed_styles from shared.init import initialize_main_variables, find_entry grid_actions = {'reloadconfig': 'RELOADCONFIG', 'showqueued': 'JOBQUEUEINFO', 'showexecuting': 'EXECUTINGQUEUEINFO', 'showdone': 'DONEQUEUEINFO', 'dropqueued': 'DROPQUEUED', 'dropexecuting': 'DROPEXECUTING', 'dropdone': 'DROPDONE', } certreq_actions = ['addcertreq', 'delcertreq'] def signature(): """Signature of the main function""" defaults = {'action': [''], 'req_id': [], 'job_id': [], 'lines': [20]} return ['html_form', defaults] def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id, op_header=False) defaults = signature()[1] (validate_status, accepted) = validate_input_and_cert( user_arguments_dict, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: return (accepted, returnvalues.CLIENT_ERROR) action = accepted['action'][-1] req_list = accepted['req_id'] job_list = accepted['job_id'] lines = int(accepted['lines'][-1]) meta = '''<meta http-equiv="refresh" content="%s" /> ''' % configuration.sleep_secs style = themed_styles(configuration) script = ''' <script type="text/javascript" src="/images/js/jquery.js"></script> <script type="text/javascript" src="/images/js/jquery.tablesorter.js"></script> <script type="text/javascript" src="/images/js/jquery.tablesorter.pager.js"> </script> <script type="text/javascript" src="/images/js/jquery.tablesorter.widgets.js"></script> <script type="text/javascript" src="/images/js/jquery-ui.js"></script> <script type="text/javascript" src="/images/js/jquery.confirm.js"></script> <script type="text/javascript" > $(document).ready(function() { // init confirmation dialog $( "#confirm_dialog" ).dialog( // see http://jqueryui.com/docs/dialog/ for options { autoOpen: false, modal: true, closeOnEscape: true, width: 500, buttons: { "Cancel": function() { $( "#" + name ).dialog("close"); } } }); // table initially sorted by col. 
9 (created) var sortOrder = [[9,0]]; $("#certreqtable").tablesorter({widgets: ["zebra", "saveSort"], sortList:sortOrder }) .tablesorterPager({ container: $("#pager"), size: %s }); } ); </script> ''' % default_pager_entries title_entry = find_entry(output_objects, 'title') title_entry['text'] = '%s administration panel' % configuration.short_title title_entry['meta'] = meta title_entry['style'] = style title_entry['javascript'] = script output_objects.append({'object_type': 'html_form', 'text':''' <div id="confirm_dialog" title="Confirm" style="background:#fff;"> <div id="confirm_text"><!-- filled by js --></div> <textarea cols="40" rows="4" id="confirm_input" style="display:none;"></textarea> </div> ''' }) if not is_admin(client_id, configuration, logger): output_objects.append( {'object_type': 'error_text', 'text' : 'You must be an admin to access this control panel.'}) return (output_objects, returnvalues.CLIENT_ERROR) html = '' if action and not action in grid_actions.keys() + certreq_actions: output_objects.append({'object_type': 'error_text', 'text' : 'Invalid action: %s' % action}) return (output_objects, returnvalues.SYSTEM_ERROR) if action in grid_actions: msg = "%s" % grid_actions[action] if job_list: msg += ' %s' % ' '.join(job_list) msg += '\n' if not send_message_to_grid_script(msg, logger, configuration): output_objects.append( {'object_type': 'error_text', 'text' : '''Error sending %s message to grid_script.''' % action }) status = returnvalues.SYSTEM_ERROR elif action in certreq_actions: if action == "addcertreq": for req_id in req_list: if accept_cert_req(req_id, configuration): output_objects.append( {'object_type': 'text', 'text': 'Accepted certificate request %s' % req_id}) else: output_objects.append( {'object_type': 'error_text', 'text': 'Accept certificate request failed - details in log' }) elif action == "delcertreq": for req_id in req_list: if delete_cert_req(req_id, configuration): output_objects.append( {'object_type': 'text', 'text': 'Deleted certificate request %s' % req_id}) else: output_objects.append( {'object_type': 'error_text', 'text': 'Delete certificate request failed - details in log' }) show, drop = '', '' general = """ <h1>Server Status</h1> <p class='importanttext'> This page automatically refreshes every %s seconds. </p> <p> You can see the current grid daemon status and server logs below. The buttons provide access to e.g. managing the grid job queues. 
</p> <form method='get' action='migadmin.py'> <input type='hidden' name='action' value='' /> <input type='submit' value='Show last log lines' /> <input type='text' size='2' name='lines' value='%s' /> </form> <br /> <form method='get' action='migadmin.py'> <input type='hidden' name='lines' value='%s' /> <input type='hidden' name='action' value='reloadconfig' /> <input type='submit' value='Reload Configuration' /> </form> <br /> """ % (configuration.sleep_secs, lines, lines) show += """ <form method='get' action='migadmin.py'> <input type='hidden' name='lines' value='%s' /> <input type='submit' value='Log Jobs' /> <select name='action'> """ % lines drop += """ <form method='get' action='migadmin.py'> <input type='hidden' name='lines' value='%s' /> <input type='submit' value='Drop Job' /> <select name='action'> """ % lines for queue in ['queued', 'executing', 'done']: selected = '' if action.find(queue) != -1: selected = 'selected' show += "<option %s value='show%s'>%s</option>" % (selected, queue, queue) drop += "<option %s value='drop%s'>%s</option>" % (selected, queue, queue) show += """ </select> </form> <br /> """ drop += """ </select> <input type='text' size='20' name='job_id' value='' /> </form> <br /> """ html += general html += show html += drop daemons = """ <div id='daemonstatus'> """ daemon_names = ['grid_script.py', 'grid_monitor.py', 'grid_sshmux.py'] # No need to run im_notify unless any im notify protocols are enabled if [i for i in configuration.notify_protocols if i != 'email']: daemon_names.append('grid_imnotify.py') if configuration.site_enable_sftp: daemon_names.append('grid_sftp.py') if configuration.site_enable_davs: daemon_names.append('grid_webdavs.py') if configuration.site_enable_ftps: daemon_names.append('grid_ftps.py') if configuration.site_enable_openid: daemon_names.append('grid_openid.py') for proc in daemon_names: pgrep_proc = subprocess.Popen(['pgrep', '-f', proc], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) pgrep_proc.wait() ps_out = pgrep_proc.stdout.read().strip() if pgrep_proc.returncode == 0: daemons += "<div class='status_online'>%s running (pid %s)</div>" \ % (proc, ps_out) else: daemons += "<div class='status_offline'>%s not running!</div>" % \ proc daemons += """</div> <br /> """ html += daemons output_objects.append({'object_type': 'header', 'text' : 'Pending Certificate Requests'}) (status, ret) = list_cert_reqs(configuration) if not status: logger.error("%s: failed for '%s': %s" % (op_name, client_id, ret)) output_objects.append({'object_type': 'error_text', 'text' : ret}) return (output_objects, returnvalues.SYSTEM_ERROR) certreqs = [] for req_id in ret: (load_status, req_dict) = get_cert_req(req_id, configuration) if not load_status: logger.error("%s: load failed for '%s': %s" % \ (op_name, req_id, req_dict)) output_objects.append({'object_type': 'error_text', 'text' : 'Could not read details for "%s"' % \ req_id}) return (output_objects, returnvalues.SYSTEM_ERROR) req_item = build_certreqitem_object(configuration, req_dict) js_name = 'create%s' % req_id helper = html_post_helper(js_name, 'migadmin.py', {'action': 'addcertreq', 'req_id': req_id}) output_objects.append({'object_type': 'html_form', 'text': helper}) req_item['addcertreqlink'] = { 'object_type': 'link', 'destination': "javascript: confirmDialog(%s, '%s');" % \ (js_name, 'Really accept %s?' 
% req_id), 'class': 'addlink', 'title': 'Accept %s' % req_id, 'text': ''} js_name = 'delete%s' % req_id helper = html_post_helper(js_name, 'migadmin.py', {'action': 'delcertreq', 'req_id': req_id}) output_objects.append({'object_type': 'html_form', 'text': helper}) req_item['delcertreqlink'] = { 'object_type': 'link', 'destination': "javascript: confirmDialog(%s, '%s');" % \ (js_name, 'Really remove %s?' % req_id), 'class': 'removelink', 'title': 'Remove %s' % req_id, 'text': ''} certreqs.append(req_item) output_objects.append({'object_type': 'table_pager', 'entry_name': 'pending certificate requests', 'default_entries': default_pager_entries}) output_objects.append({'object_type': 'certreqs', 'certreqs': certreqs}) log_path_list = [] if os.path.isabs(configuration.logfile): log_path_list.append(configuration.logfile) else: log_path_list.append(os.path.join(configuration.log_dir, configuration.logfile)) for log_path in log_path_list: html += ''' <h1>%s</h1> <textarea rows=%s cols=200 readonly="readonly"> ''' % (log_path, lines) try: logger.debug("loading %d lines from %s" % (lines, log_path)) log_fd = open(log_path, 'r') log_fd.seek(0, os.SEEK_END) size = log_fd.tell() pos = log_fd.tell() log_lines = [] step_size = 100 # locate last X lines while pos > 0 and len(log_lines) < lines: offset = min(lines * step_size, size) logger.debug("seek to offset %d from end of %s" % (offset, log_path)) log_fd.seek(-offset, os.SEEK_END) pos = log_fd.tell() log_lines = log_fd.readlines() step_size *= 2 logger.debug("reading %d lines from %s" % (lines, log_path)) html += ''.join(log_lines[-lines:]) log_fd.close() except Exception, exc: logger.error("reading %d lines from %s: %s" % (lines, log_path, exc)) output_objects.append({'object_type': 'error_text', 'text' : 'Error reading log (%s)' % exc}) return (output_objects, returnvalues.SYSTEM_ERROR) html += '''</textarea> ''' output_objects.append({'object_type': 'html_form', 'text' : html}) return (output_objects, returnvalues.OK)
gpl-2.0
apocalypsebg/odoo
openerp/addons/base/ir/ir_attachment.py
183
16487
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import hashlib import itertools import logging import os import re from openerp import tools from openerp.tools.translate import _ from openerp.exceptions import AccessError from openerp.osv import fields,osv from openerp import SUPERUSER_ID from openerp.osv.orm import except_orm from openerp.tools.translate import _ _logger = logging.getLogger(__name__) class ir_attachment(osv.osv): """Attachments are used to link binary files or url to any openerp document. External attachment storage --------------------------- The 'data' function field (_data_get,data_set) is implemented using _file_read, _file_write and _file_delete which can be overridden to implement other storage engines, shuch methods should check for other location pseudo uri (example: hdfs://hadoppserver) The default implementation is the file:dirname location that stores files on the local filesystem using name based on their sha1 hash """ _order = 'id desc' def _name_get_resname(self, cr, uid, ids, object, method, context): data = {} for attachment in self.browse(cr, uid, ids, context=context): model_object = attachment.res_model res_id = attachment.res_id if model_object and res_id: model_pool = self.pool[model_object] res = model_pool.name_get(cr,uid,[res_id],context) res_name = res and res[0][1] or None if res_name: field = self._columns.get('res_name',False) if field and len(res_name) > field.size: res_name = res_name[:30] + '...' 
data[attachment.id] = res_name or False else: data[attachment.id] = False return data def _storage(self, cr, uid, context=None): return self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'ir_attachment.location', 'file') def _filestore(self, cr, uid, context=None): return tools.config.filestore(cr.dbname) def force_storage(self, cr, uid, context=None): """Force all attachments to be stored in the currently configured storage""" if not self.pool['res.users'].has_group(cr, uid, 'base.group_erp_manager'): raise AccessError(_('Only administrators can execute this action.')) location = self._storage(cr, uid, context) domain = { 'db': [('store_fname', '!=', False)], 'file': [('db_datas', '!=', False)], }[location] ids = self.search(cr, uid, domain, context=context) for attach in self.browse(cr, uid, ids, context=context): attach.write({'datas': attach.datas}) return True # 'data' field implementation def _full_path(self, cr, uid, path): # sanitize ath path = re.sub('[.]', '', path) path = path.strip('/\\') return os.path.join(self._filestore(cr, uid), path) def _get_path(self, cr, uid, bin_data): sha = hashlib.sha1(bin_data).hexdigest() # retro compatibility fname = sha[:3] + '/' + sha full_path = self._full_path(cr, uid, fname) if os.path.isfile(full_path): return fname, full_path # keep existing path # scatter files across 256 dirs # we use '/' in the db (even on windows) fname = sha[:2] + '/' + sha full_path = self._full_path(cr, uid, fname) dirname = os.path.dirname(full_path) if not os.path.isdir(dirname): os.makedirs(dirname) return fname, full_path def _file_read(self, cr, uid, fname, bin_size=False): full_path = self._full_path(cr, uid, fname) r = '' try: if bin_size: r = os.path.getsize(full_path) else: r = open(full_path,'rb').read().encode('base64') except IOError: _logger.exception("_read_file reading %s", full_path) return r def _file_write(self, cr, uid, value): bin_value = value.decode('base64') fname, full_path = self._get_path(cr, uid, bin_value) if not os.path.exists(full_path): try: with open(full_path, 'wb') as fp: fp.write(bin_value) except IOError: _logger.exception("_file_write writing %s", full_path) return fname def _file_delete(self, cr, uid, fname): # using SQL to include files hidden through unlink or due to record rules cr.execute("SELECT COUNT(*) FROM ir_attachment WHERE store_fname = %s", (fname,)) count = cr.fetchone()[0] full_path = self._full_path(cr, uid, fname) if not count and os.path.exists(full_path): try: os.unlink(full_path) except OSError: _logger.exception("_file_delete could not unlink %s", full_path) except IOError: # Harmless and needed for race conditions _logger.exception("_file_delete could not unlink %s", full_path) def _data_get(self, cr, uid, ids, name, arg, context=None): if context is None: context = {} result = {} bin_size = context.get('bin_size') for attach in self.browse(cr, uid, ids, context=context): if attach.store_fname: result[attach.id] = self._file_read(cr, uid, attach.store_fname, bin_size) else: result[attach.id] = attach.db_datas return result def _data_set(self, cr, uid, id, name, value, arg, context=None): # We dont handle setting data to null if not value: return True if context is None: context = {} location = self._storage(cr, uid, context) file_size = len(value.decode('base64')) attach = self.browse(cr, uid, id, context=context) fname_to_delete = attach.store_fname if location != 'db': fname = self._file_write(cr, uid, value) # SUPERUSER_ID as probably don't have write access, trigger during create 
super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'store_fname': fname, 'file_size': file_size, 'db_datas': False}, context=context) else: super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'db_datas': value, 'file_size': file_size, 'store_fname': False}, context=context) # After de-referencing the file in the database, check whether we need # to garbage-collect it on the filesystem if fname_to_delete: self._file_delete(cr, uid, fname_to_delete) return True _name = 'ir.attachment' _columns = { 'name': fields.char('Attachment Name', required=True), 'datas_fname': fields.char('File Name'), 'description': fields.text('Description'), 'res_name': fields.function(_name_get_resname, type='char', string='Resource Name', store=True), 'res_model': fields.char('Resource Model', readonly=True, help="The database object this attachment will be attached to"), 'res_id': fields.integer('Resource ID', readonly=True, help="The record id this is attached to"), 'create_date': fields.datetime('Date Created', readonly=True), 'create_uid': fields.many2one('res.users', 'Owner', readonly=True), 'company_id': fields.many2one('res.company', 'Company', change_default=True), 'type': fields.selection( [ ('url','URL'), ('binary','Binary'), ], 'Type', help="Binary File or URL", required=True, change_default=True), 'url': fields.char('Url', size=1024), # al: We keep shitty field names for backward compatibility with document 'datas': fields.function(_data_get, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True), 'store_fname': fields.char('Stored Filename'), 'db_datas': fields.binary('Database Data'), 'file_size': fields.integer('File Size'), } _defaults = { 'type': 'binary', 'file_size': 0, 'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.attachment', context=c), } def _auto_init(self, cr, context=None): super(ir_attachment, self)._auto_init(cr, context) cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_attachment_res_idx',)) if not cr.fetchone(): cr.execute('CREATE INDEX ir_attachment_res_idx ON ir_attachment (res_model, res_id)') cr.commit() def check(self, cr, uid, ids, mode, context=None, values=None): """Restricts the access to an ir.attachment, according to referred model In the 'document' module, it is overriden to relax this hard rule, since more complex ones apply there. 
""" res_ids = {} require_employee = False if ids: if isinstance(ids, (int, long)): ids = [ids] cr.execute('SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids,)) for rmod, rid, create_uid in cr.fetchall(): if not (rmod and rid): if create_uid != uid: require_employee = True continue res_ids.setdefault(rmod,set()).add(rid) if values: if values.get('res_model') and values.get('res_id'): res_ids.setdefault(values['res_model'],set()).add(values['res_id']) ima = self.pool.get('ir.model.access') for model, mids in res_ids.items(): # ignore attachments that are not attached to a resource anymore when checking access rights # (resource was deleted but attachment was not) if not self.pool.get(model): require_employee = True continue existing_ids = self.pool[model].exists(cr, uid, mids) if len(existing_ids) != len(mids): require_employee = True ima.check(cr, uid, model, mode) self.pool[model].check_access_rule(cr, uid, existing_ids, mode, context=context) if require_employee: if not uid == SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_user'): raise except_orm(_('Access Denied'), _("Sorry, you are not allowed to access this document.")) def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): ids = super(ir_attachment, self)._search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False, access_rights_uid=access_rights_uid) if not ids: if count: return 0 return [] # Work with a set, as list.remove() is prohibitive for large lists of documents # (takes 20+ seconds on a db with 100k docs during search_count()!) orig_ids = ids ids = set(ids) # For attachments, the permissions of the document they are attached to # apply, so we must remove attachments for which the user cannot access # the linked document. # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs), # and the permissions are checked in super() and below anyway. cr.execute("""SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids),)) targets = cr.dictfetchall() model_attachments = {} for target_dict in targets: if not target_dict['res_model']: continue # model_attachments = { 'model': { 'res_id': [id1,id2] } } model_attachments.setdefault(target_dict['res_model'],{}).setdefault(target_dict['res_id'] or 0, set()).add(target_dict['id']) # To avoid multiple queries for each attachment found, checks are # performed in batch as much as possible. 
ima = self.pool.get('ir.model.access') for model, targets in model_attachments.iteritems(): if model not in self.pool: continue if not ima.check(cr, uid, model, 'read', False): # remove all corresponding attachment ids for attach_id in itertools.chain(*targets.values()): ids.remove(attach_id) continue # skip ir.rule processing, these ones are out already # filter ids according to what access rules permit target_ids = targets.keys() allowed_ids = [0] + self.pool[model].search(cr, uid, [('id', 'in', target_ids)], context=context) disallowed_ids = set(target_ids).difference(allowed_ids) for res_id in disallowed_ids: for attach_id in targets[res_id]: ids.remove(attach_id) # sort result according to the original sort ordering result = [id for id in orig_ids if id in ids] return len(result) if count else list(result) def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'read', context=context) return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context=context, load=load) def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'write', context=context, values=vals) if 'file_size' in vals: del vals['file_size'] return super(ir_attachment, self).write(cr, uid, ids, vals, context) def copy(self, cr, uid, id, default=None, context=None): self.check(cr, uid, [id], 'write', context=context) return super(ir_attachment, self).copy(cr, uid, id, default, context) def unlink(self, cr, uid, ids, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'unlink', context=context) # First delete in the database, *then* in the filesystem if the # database allowed it. Helps avoid errors when concurrent transactions # are deleting the same file, and some of the transactions are # rolled back by PostgreSQL (due to concurrent updates detection). to_delete = [a.store_fname for a in self.browse(cr, uid, ids, context=context) if a.store_fname] res = super(ir_attachment, self).unlink(cr, uid, ids, context) for file_path in to_delete: self._file_delete(cr, uid, file_path) return res def create(self, cr, uid, values, context=None): self.check(cr, uid, [], mode='write', context=context, values=values) if 'file_size' in values: del values['file_size'] return super(ir_attachment, self).create(cr, uid, values, context) def action_get(self, cr, uid, context=None): return self.pool.get('ir.actions.act_window').for_xml_id( cr, uid, 'base', 'action_attachment', context=context) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
DucQuang1/youtube-dl
youtube_dl/extractor/youku.py
49
7678
# coding: utf-8 from __future__ import unicode_literals import base64 from .common import InfoExtractor from ..utils import ExtractorError from ..compat import ( compat_urllib_parse, compat_ord, compat_urllib_request, ) class YoukuIE(InfoExtractor): IE_NAME = 'youku' IE_DESC = '优酷' _VALID_URL = r'''(?x) (?: http://(?:v|player)\.youku\.com/(?:v_show/id_|player\.php/sid/)| youku:) (?P<id>[A-Za-z0-9]+)(?:\.html|/v\.swf|) ''' _TESTS = [{ 'url': 'http://v.youku.com/v_show/id_XMTc1ODE5Njcy.html', 'md5': '5f3af4192eabacc4501508d54a8cabd7', 'info_dict': { 'id': 'XMTc1ODE5Njcy_part1', 'title': '★Smile﹗♡ Git Fresh -Booty Music舞蹈.', 'ext': 'flv' } }, { 'url': 'http://player.youku.com/player.php/sid/XNDgyMDQ2NTQw/v.swf', 'only_matching': True, }, { 'url': 'http://v.youku.com/v_show/id_XODgxNjg1Mzk2_ev_1.html', 'info_dict': { 'id': 'XODgxNjg1Mzk2', 'title': '武媚娘传奇 85', }, 'playlist_count': 11, }, { 'url': 'http://v.youku.com/v_show/id_XMTI1OTczNDM5Mg==.html', 'info_dict': { 'id': 'XMTI1OTczNDM5Mg', 'title': '花千骨 04', }, 'playlist_count': 13, 'skip': 'Available in China only', }] def construct_video_urls(self, data1, data2): # get sid, token def yk_t(s1, s2): ls = list(range(256)) t = 0 for i in range(256): t = (t + ls[i] + compat_ord(s1[i % len(s1)])) % 256 ls[i], ls[t] = ls[t], ls[i] s = bytearray() x, y = 0, 0 for i in range(len(s2)): y = (y + 1) % 256 x = (x + ls[y]) % 256 ls[x], ls[y] = ls[y], ls[x] s.append(compat_ord(s2[i]) ^ ls[(ls[x] + ls[y]) % 256]) return bytes(s) sid, token = yk_t( b'becaf9be', base64.b64decode(data2['ep'].encode('ascii')) ).decode('ascii').split('_') # get oip oip = data2['ip'] # get fileid string_ls = list( 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\:._-1234567890') shuffled_string_ls = [] seed = data1['seed'] N = len(string_ls) for ii in range(N): seed = (seed * 0xd3 + 0x754f) % 0x10000 idx = seed * len(string_ls) // 0x10000 shuffled_string_ls.append(string_ls[idx]) del string_ls[idx] fileid_dict = {} for format in data1['streamtypes']: streamfileid = [ int(i) for i in data1['streamfileids'][format].strip('*').split('*')] fileid = ''.join( [shuffled_string_ls[i] for i in streamfileid]) fileid_dict[format] = fileid[:8] + '%s' + fileid[10:] def get_fileid(format, n): fileid = fileid_dict[format] % hex(int(n))[2:].upper().zfill(2) return fileid # get ep def generate_ep(format, n): fileid = get_fileid(format, n) ep_t = yk_t( b'bf7e5f01', ('%s_%s_%s' % (sid, fileid, token)).encode('ascii') ) ep = base64.b64encode(ep_t).decode('ascii') return ep # generate video_urls video_urls_dict = {} for format in data1['streamtypes']: video_urls = [] for dt in data1['segs'][format]: n = str(int(dt['no'])) param = { 'K': dt['k'], 'hd': self.get_hd(format), 'myp': 0, 'ts': dt['seconds'], 'ypp': 0, 'ctype': 12, 'ev': 1, 'token': token, 'oip': oip, 'ep': generate_ep(format, n) } video_url = \ 'http://k.youku.com/player/getFlvPath/' + \ 'sid/' + sid + \ '_' + str(int(n) + 1).zfill(2) + \ '/st/' + self.parse_ext_l(format) + \ '/fileid/' + get_fileid(format, n) + '?' 
+ \ compat_urllib_parse.urlencode(param) video_urls.append(video_url) video_urls_dict[format] = video_urls return video_urls_dict def get_hd(self, fm): hd_id_dict = { 'flv': '0', 'mp4': '1', 'hd2': '2', 'hd3': '3', '3gp': '0', '3gphd': '1' } return hd_id_dict[fm] def parse_ext_l(self, fm): ext_dict = { 'flv': 'flv', 'mp4': 'mp4', 'hd2': 'flv', 'hd3': 'flv', '3gp': 'flv', '3gphd': 'mp4' } return ext_dict[fm] def get_format_name(self, fm): _dict = { '3gp': 'h6', '3gphd': 'h5', 'flv': 'h4', 'mp4': 'h3', 'hd2': 'h2', 'hd3': 'h1' } return _dict[fm] def _real_extract(self, url): video_id = self._match_id(url) def retrieve_data(req_url, note): req = compat_urllib_request.Request(req_url) cn_verification_proxy = self._downloader.params.get('cn_verification_proxy') if cn_verification_proxy: req.add_header('Ytdl-request-proxy', cn_verification_proxy) raw_data = self._download_json(req, video_id, note=note) return raw_data['data'][0] # request basic data data1 = retrieve_data( 'http://v.youku.com/player/getPlayList/VideoIDS/%s' % video_id, 'Downloading JSON metadata 1') data2 = retrieve_data( 'http://v.youku.com/player/getPlayList/VideoIDS/%s/Pf/4/ctype/12/ev/1' % video_id, 'Downloading JSON metadata 2') error_code = data1.get('error_code') if error_code: error = data1.get('error') if error is not None and '因版权原因无法观看此视频' in error: raise ExtractorError( 'Youku said: Sorry, this video is available in China only', expected=True) else: msg = 'Youku server reported error %i' % error_code if error is not None: msg += ': ' + error raise ExtractorError(msg) title = data1['title'] # generate video_urls_dict video_urls_dict = self.construct_video_urls(data1, data2) # construct info entries = [{ 'id': '%s_part%d' % (video_id, i + 1), 'title': title, 'formats': [], # some formats are not available for all parts, we have to detect # which one has all } for i in range(max(len(v) for v in data1['segs'].values()))] for fm in data1['streamtypes']: video_urls = video_urls_dict[fm] for video_url, seg, entry in zip(video_urls, data1['segs'][fm], entries): entry['formats'].append({ 'url': video_url, 'format_id': self.get_format_name(fm), 'ext': self.parse_ext_l(fm), 'filesize': int(seg['size']), }) return { '_type': 'multi_video', 'id': video_id, 'title': title, 'entries': entries, }
unlicense
AsimmHirani/ISpyPi
tensorflow/contrib/tensorflow-master/tensorflow/tensorboard/lib/python/http_util_test.py
21
6517
# -*- coding: utf-8 -*- # Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests HTTP utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import gzip import six from werkzeug import test as wtest from werkzeug import wrappers from tensorflow.python.platform import test from tensorflow.tensorboard.lib.python import http_util class RespondTest(test.TestCase): def testHelloWorld(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, '<b>hello world</b>', 'text/html') self.assertEqual(r.status_code, 200) self.assertEqual(r.response[0], six.b('<b>hello world</b>')) def testHeadRequest_doesNotWrite(self): builder = wtest.EnvironBuilder(method='HEAD') env = builder.get_environ() request = wrappers.Request(env) r = http_util.Respond(request, '<b>hello world</b>', 'text/html') self.assertEqual(r.status_code, 200) self.assertEqual(r.response[0], six.b('')) def testPlainText_appendsUtf8ToContentType(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, 'hello', 'text/plain') h = r.headers self.assertEqual(h.get('Content-Type'), 'text/plain; charset=utf-8') def testContentLength_isInBytes(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, '爱', 'text/plain') self.assertEqual(r.headers.get('Content-Length'), '3') q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, '爱'.encode('utf-8'), 'text/plain') self.assertEqual(r.headers.get('Content-Length'), '3') def testResponseCharsetTranscoding(self): bean = '要依法治国是赞美那些谁是公义的和惩罚恶人。 - 韩非' # input is unicode string, output is gbk string q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, bean, 'text/plain; charset=gbk') self.assertEqual(r.response[0], bean.encode('gbk')) # input is utf-8 string, output is gbk string q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, bean.encode('utf-8'), 'text/plain; charset=gbk') self.assertEqual(r.response[0], bean.encode('gbk')) # input is object with unicode strings, output is gbk json q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, {'red': bean}, 'application/json; charset=gbk') self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}') # input is object with utf-8 strings, output is gbk json q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond( q, {'red': bean.encode('utf-8')}, 'application/json; charset=gbk') self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}') # input is object with gbk strings, output is gbk json q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond( q, {'red': bean.encode('gbk')}, 'application/json; charset=gbk', encoding='gbk') 
self.assertEqual(r.response[0], b'{"red": "' + bean.encode('gbk') + b'"}') def testAcceptGzip_compressesResponse(self): fall_of_hyperion_canto1_stanza1 = '\n'.join([ 'Fanatics have their dreams, wherewith they weave', 'A paradise for a sect; the savage too', 'From forth the loftiest fashion of his sleep', 'Guesses at Heaven; pity these have not', 'Trac\'d upon vellum or wild Indian leaf', 'The shadows of melodious utterance.', 'But bare of laurel they live, dream, and die;', 'For Poesy alone can tell her dreams,', 'With the fine spell of words alone can save', 'Imagination from the sable charm', 'And dumb enchantment. Who alive can say,', '\'Thou art no Poet may\'st not tell thy dreams?\'', 'Since every man whose soul is not a clod', 'Hath visions, and would speak, if he had loved', 'And been well nurtured in his mother tongue.', 'Whether the dream now purpos\'d to rehearse', 'Be poet\'s or fanatic\'s will be known', 'When this warm scribe my hand is in the grave.', ]) e1 = wtest.EnvironBuilder(headers={'Accept-Encoding': '*'}).get_environ() any_encoding = wrappers.Request(e1) r = http_util.Respond( any_encoding, fall_of_hyperion_canto1_stanza1, 'text/plain') self.assertEqual(r.headers.get('Content-Encoding'), 'gzip') self.assertEqual( _gunzip(r.response[0]), fall_of_hyperion_canto1_stanza1.encode('utf-8')) e2 = wtest.EnvironBuilder(headers={'Accept-Encoding': 'gzip'}).get_environ() gzip_encoding = wrappers.Request(e2) r = http_util.Respond( gzip_encoding, fall_of_hyperion_canto1_stanza1, 'text/plain') self.assertEqual(r.headers.get('Content-Encoding'), 'gzip') self.assertEqual( _gunzip(r.response[0]), fall_of_hyperion_canto1_stanza1.encode('utf-8')) r = http_util.Respond( any_encoding, fall_of_hyperion_canto1_stanza1, 'image/png') self.assertEqual( r.response[0], fall_of_hyperion_canto1_stanza1.encode('utf-8')) def testJson_getsAutoSerialized(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, [1, 2, 3], 'application/json') self.assertEqual(r.response[0], b'[1, 2, 3]') def testExpires_setsCruiseControl(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, '<b>hello world</b>', 'text/html', expires=60) self.assertEqual(r.headers.get('Cache-Control'), 'private, max-age=60') def _gunzip(bs): return gzip.GzipFile('', 'rb', 9, six.BytesIO(bs)).read() if __name__ == '__main__': test.main()
apache-2.0
MadCat34/Sick-Beard
lib/requests/status_codes.py
56
3029
# -*- coding: utf-8 -*-

from .structures import LookupDict

_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
gpl-3.0
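Assuming the requests package is installed, the LookupDict built at the bottom of the record above is what requests exposes as requests.codes; a short sketch of how the aliases resolve:

from requests.status_codes import codes

# Every alias in _codes becomes an attribute, plus an upper-case twin
# for names that do not start with a backslash (the ASCII-art ones).
assert codes.ok == 200
assert codes.OK == 200
assert codes.not_found == 404
assert codes.teapot == 418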
kJhdnv9Za7Qh/Passport-Enquiries-Prototype
node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSSettings.py
437
43606
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Code to validate and convert settings of the Microsoft build tools. This file contains code to validate and convert settings of the Microsoft build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), and ValidateMSBuildSettings() are the entry points. This file was created by comparing the projects created by Visual Studio 2008 and Visual Studio 2010 for all available settings through the user interface. The MSBuild schemas were also considered. They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ import sys import re # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} _msbuild_validators = {} # A dictionary of settings converters. The key is the tool name, the value is # a dictionary mapping setting names to conversion functions. _msvs_to_msbuild_converters = {} # Tool name mapping from MSVS to MSBuild. _msbuild_name_of_tool = {} class _Tool(object): """Represents a tool used by MSVS or MSBuild. Attributes: msvs_name: The name of the tool in MSVS. msbuild_name: The name of the tool in MSBuild. """ def __init__(self, msvs_name, msbuild_name): self.msvs_name = msvs_name self.msbuild_name = msbuild_name def _AddTool(tool): """Adds a tool to the four dictionaries used to process settings. This only defines the tool. Each setting also needs to be added. Args: tool: The _Tool object to be added. """ _msvs_validators[tool.msvs_name] = {} _msbuild_validators[tool.msbuild_name] = {} _msvs_to_msbuild_converters[tool.msvs_name] = {} _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name def _GetMSBuildToolSettings(msbuild_settings, tool): """Returns an MSBuild tool dictionary. Creates it if needed.""" return msbuild_settings.setdefault(tool.msbuild_name, {}) class _Type(object): """Type of settings (Base class).""" def ValidateMSVS(self, value): """Verifies that the value is legal for MSVS. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSVS. """ def ValidateMSBuild(self, value): """Verifies that the value is legal for MSBuild. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSBuild. """ def ConvertToMSBuild(self, value): """Returns the MSBuild equivalent of the MSVS value given. Args: value: the MSVS value to convert. Returns: the MSBuild equivalent. Raises: ValueError if value is not valid. 
""" return value class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros return ConvertVCMacrosToMSBuild(value) class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros if isinstance(value, list): return [ConvertVCMacrosToMSBuild(i) for i in value] else: return ConvertVCMacrosToMSBuild(value) class _Boolean(_Type): """Boolean settings, can have the values 'false' or 'true'.""" def _Validate(self, value): if value != 'true' and value != 'false': raise ValueError('expected bool; got %r' % value) def ValidateMSVS(self, value): self._Validate(value) def ValidateMSBuild(self, value): self._Validate(value) def ConvertToMSBuild(self, value): self._Validate(value) return value class _Integer(_Type): """Integer settings.""" def __init__(self, msbuild_base=10): _Type.__init__(self) self._msbuild_base = msbuild_base def ValidateMSVS(self, value): # Try to convert, this will raise ValueError if invalid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): # Try to convert, this will raise ValueError if invalid. int(value, self._msbuild_base) def ConvertToMSBuild(self, value): msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' return msbuild_format % int(value) class _Enumeration(_Type): """Type of settings that is an enumeration. In MSVS, the values are indexes like '0', '1', and '2'. MSBuild uses text labels that are more representative, like 'Win32'. Constructor args: label_list: an array of MSBuild labels that correspond to the MSVS index. In the rare cases where MSVS has skipped an index value, None is used in the array to indicate the unused spot. new: an array of labels that are new to MSBuild. """ def __init__(self, label_list, new=None): _Type.__init__(self) self._label_list = label_list self._msbuild_values = set(value for value in label_list if value is not None) if new is not None: self._msbuild_values.update(new) def ValidateMSVS(self, value): # Try to convert. It will raise an exception if not valid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): if value not in self._msbuild_values: raise ValueError('unrecognized enumerated value %s' % value) def ConvertToMSBuild(self, value): index = int(value) if index < 0 or index >= len(self._label_list): raise ValueError('index value (%d) not in expected range [0, %d)' % (index, len(self._label_list))) label = self._label_list[index] if label is None: raise ValueError('converted value for %s not specified.' % value) return label # Instantiate the various generic types. _boolean = _Boolean() _integer = _Integer() # For now, we don't do any special validation on these types: _string = _String() _file_name = _String() _folder_name = _String() _file_list = _StringList() _folder_list = _StringList() _string_list = _StringList() # Some boolean settings went from numerical values to boolean. The # mapping is 0: default, 1: false, 2: true. 
_newly_boolean = _Enumeration(['', 'false', 'true']) def _Same(tool, name, setting_type): """Defines a setting that has the same name in MSVS and MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ _Renamed(tool, name, name, setting_type) def _Renamed(tool, msvs_name, msbuild_name, setting_type): """Defines a setting for which the name has changed. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_name: the name of the MSVS setting. msbuild_name: the name of the MSBuild setting. setting_type: the type of this setting. """ def _Translate(value, msbuild_settings): msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS _msbuild_validators[tool.msbuild_name][msbuild_name] = ( setting_type.ValidateMSBuild) _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate def _Moved(tool, settings_name, msbuild_tool_name, setting_type): _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name, setting_type) def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type): """Defines a setting that may have moved to a new section. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_settings_name: the MSVS name of the setting. msbuild_tool_name: the name of the MSBuild tool to place the setting under. msbuild_settings_name: the MSBuild name of the setting. setting_type: the type of this setting. """ def _Translate(value, msbuild_settings): tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) _msvs_validators[tool.msvs_name][msvs_settings_name] = ( setting_type.ValidateMSVS) validator = setting_type.ValidateMSBuild _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate def _MSVSOnly(tool, name, setting_type): """Defines a setting that is only found in MSVS. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ def _Translate(unused_value, unused_msbuild_settings): # Since this is for MSVS only settings, no translation will happen. pass _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate def _MSBuildOnly(tool, name, setting_type): """Defines a setting that is only found in MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild def _ConvertedToAdditionalOption(tool, msvs_name, flag): """Defines a setting that's handled via a command line option in MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. 
msvs_name: the name of the MSVS setting that if 'true' becomes a flag flag: the flag to insert at the end of the AdditionalOptions """ def _Translate(value, msbuild_settings): if value == 'true': tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) if 'AdditionalOptions' in tool_settings: new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag) else: new_flags = flag tool_settings['AdditionalOptions'] = new_flags _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate def _CustomGeneratePreprocessedFile(tool, msvs_name): def _Translate(value, msbuild_settings): tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) if value == '0': tool_settings['PreprocessToFile'] = 'false' tool_settings['PreprocessSuppressLineNumbers'] = 'false' elif value == '1': # /P tool_settings['PreprocessToFile'] = 'true' tool_settings['PreprocessSuppressLineNumbers'] = 'false' elif value == '2': # /EP /P tool_settings['PreprocessToFile'] = 'true' tool_settings['PreprocessSuppressLineNumbers'] = 'true' else: raise ValueError('value must be one of [0, 1, 2]; got %s' % value) # Create a bogus validator that looks for '0', '1', or '2' msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator msbuild_validator = _boolean.ValidateMSBuild msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] msbuild_tool_validators['PreprocessToFile'] = msbuild_validator msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir') fix_vc_macro_slashes_regex = re.compile( r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list) ) def FixVCMacroSlashes(s): """Replace macros which have excessive following slashes. These macros are known to have a built-in trailing slash. Furthermore, many scripts hiccup on processing paths with extra slashes in the middle. This list is probably not exhaustive. Add as needed. """ if '$' in s: s = fix_vc_macro_slashes_regex.sub(r'\1', s) return s def ConvertVCMacrosToMSBuild(s): """Convert the the MSVS macros found in the string to the MSBuild equivalent. This list is probably not exhaustive. Add as needed. """ if '$' in s: replace_map = { '$(ConfigurationName)': '$(Configuration)', '$(InputDir)': '%(RootDir)%(Directory)', '$(InputExt)': '%(Extension)', '$(InputFileName)': '%(Filename)%(Extension)', '$(InputName)': '%(Filename)', '$(InputPath)': '%(FullPath)', '$(ParentName)': '$(ProjectFileName)', '$(PlatformName)': '$(Platform)', '$(SafeInputName)': '%(Filename)', } for old, new in replace_map.iteritems(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). Args: msvs_settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. Returns: A dictionary of MSBuild settings. The key is either the MSBuild tool name or the empty string (for the global settings). The values are themselves dictionaries of settings and their values. 
""" msbuild_settings = {} for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): if msvs_tool_name in _msvs_to_msbuild_converters: msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): if msvs_setting in msvs_tool: # Invoke the translation function. try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) except ValueError, e: print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' '%s' % (msvs_tool_name, msvs_setting, e)) else: # We don't know this setting. Give a warning. print >> stderr, ('Warning: unrecognized setting %s/%s ' 'while converting to MSBuild.' % (msvs_tool_name, msvs_setting)) else: print >> stderr, ('Warning: unrecognized tool %s while converting to ' 'MSBuild.' % msvs_tool_name) return msbuild_settings def ValidateMSVSSettings(settings, stderr=sys.stderr): """Validates that the names of the settings are valid for MSVS. Args: settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ _ValidateSettings(_msvs_validators, settings, stderr) def ValidateMSBuildSettings(settings, stderr=sys.stderr): """Validates that the names of the settings are valid for MSBuild. Args: settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ _ValidateSettings(_msbuild_validators, settings, stderr) def _ValidateSettings(validators, settings, stderr): """Validates that the settings are valid for MSBuild or MSVS. We currently only validate the names of the settings, not their values. Args: validators: A dictionary of tools and their validators. settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ for tool_name in settings: if tool_name in validators: tool_validators = validators[tool_name] for setting, value in settings[tool_name].iteritems(): if setting in tool_validators: try: tool_validators[setting](value) except ValueError, e: print >> stderr, ('Warning: for %s/%s, %s' % (tool_name, setting, e)) else: print >> stderr, ('Warning: unrecognized setting %s/%s' % (tool_name, setting)) else: print >> stderr, ('Warning: unrecognized tool %s' % tool_name) # MSVS and MBuild names of the tools. _compile = _Tool('VCCLCompilerTool', 'ClCompile') _link = _Tool('VCLinkerTool', 'Link') _midl = _Tool('VCMIDLTool', 'Midl') _rc = _Tool('VCResourceCompilerTool', 'ResourceCompile') _lib = _Tool('VCLibrarianTool', 'Lib') _manifest = _Tool('VCManifestTool', 'Manifest') _AddTool(_compile) _AddTool(_link) _AddTool(_midl) _AddTool(_rc) _AddTool(_lib) _AddTool(_manifest) # Add sections only found in the MSBuild settings. _msbuild_validators[''] = {} _msbuild_validators['ProjectReference'] = {} _msbuild_validators['ManifestResourceCompile'] = {} # Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and # ClCompile in MSBuild. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for # the schema of the MSBuild ClCompile settings. 
# Options that have the same name in MSVS and MSBuild _Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_compile, 'AdditionalOptions', _string_list) _Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI _Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa _Same(_compile, 'BrowseInformationFile', _file_name) _Same(_compile, 'BufferSecurityCheck', _boolean) # /GS _Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za _Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd _Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT _Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false' _Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx _Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except _Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope _Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI _Same(_compile, 'ForcedUsingFiles', _file_list) # /FU _Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc _Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X _Same(_compile, 'MinimalRebuild', _boolean) # /Gm _Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl _Same(_compile, 'OmitFramePointers', _boolean) # /Oy _Same(_compile, 'PreprocessorDefinitions', _string_list) # /D _Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd _Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR _Same(_compile, 'ShowIncludes', _boolean) # /showIncludes _Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc _Same(_compile, 'StringPooling', _boolean) # /GF _Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo _Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t _Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u _Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U _Same(_compile, 'UseFullPaths', _boolean) # /FC _Same(_compile, 'WholeProgramOptimization', _boolean) # /GL _Same(_compile, 'XMLDocumentationFileName', _file_name) _Same(_compile, 'AssemblerOutput', _Enumeration(['NoListing', 'AssemblyCode', # /FA 'All', # /FAcs 'AssemblyAndMachineCode', # /FAc 'AssemblyAndSourceCode'])) # /FAs _Same(_compile, 'BasicRuntimeChecks', _Enumeration(['Default', 'StackFrameRuntimeCheck', # /RTCs 'UninitializedLocalUsageCheck', # /RTCu 'EnableFastChecks'])) # /RTC1 _Same(_compile, 'BrowseInformation', _Enumeration(['false', 'true', # /FR 'true'])) # /Fr _Same(_compile, 'CallingConvention', _Enumeration(['Cdecl', # /Gd 'FastCall', # /Gr 'StdCall'])) # /Gz _Same(_compile, 'CompileAs', _Enumeration(['Default', 'CompileAsC', # /TC 'CompileAsCpp'])) # /TP _Same(_compile, 'DebugInformationFormat', _Enumeration(['', # Disabled 'OldStyle', # /Z7 None, 'ProgramDatabase', # /Zi 'EditAndContinue'])) # /ZI _Same(_compile, 'EnableEnhancedInstructionSet', _Enumeration(['NotSet', 'StreamingSIMDExtensions', # /arch:SSE 'StreamingSIMDExtensions2'])) # /arch:SSE2 _Same(_compile, 'ErrorReporting', _Enumeration(['None', # /errorReport:none 'Prompt', # /errorReport:prompt 'Queue'], # /errorReport:queue new=['Send'])) # /errorReport:send" _Same(_compile, 'ExceptionHandling', _Enumeration(['false', 'Sync', # /EHsc 'Async'], # /EHa new=['SyncCThrow'])) # /EHs _Same(_compile, 'FavorSizeOrSpeed', _Enumeration(['Neither', 'Speed', # /Ot 'Size'])) # /Os _Same(_compile, 'FloatingPointModel', _Enumeration(['Precise', # /fp:precise 'Strict', # /fp:strict 'Fast'])) # /fp:fast _Same(_compile, 'InlineFunctionExpansion', _Enumeration(['Default', 'OnlyExplicitInline', # 
/Ob1 'AnySuitable'], # /Ob2 new=['Disabled'])) # /Ob0 _Same(_compile, 'Optimization', _Enumeration(['Disabled', # /Od 'MinSpace', # /O1 'MaxSpeed', # /O2 'Full'])) # /Ox _Same(_compile, 'RuntimeLibrary', _Enumeration(['MultiThreaded', # /MT 'MultiThreadedDebug', # /MTd 'MultiThreadedDLL', # /MD 'MultiThreadedDebugDLL'])) # /MDd _Same(_compile, 'StructMemberAlignment', _Enumeration(['Default', '1Byte', # /Zp1 '2Bytes', # /Zp2 '4Bytes', # /Zp4 '8Bytes', # /Zp8 '16Bytes'])) # /Zp16 _Same(_compile, 'WarningLevel', _Enumeration(['TurnOffAllWarnings', # /W0 'Level1', # /W1 'Level2', # /W2 'Level3', # /W3 'Level4'], # /W4 new=['EnableAllWarnings'])) # /Wall # Options found in MSVS that have been renamed in MSBuild. _Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', _boolean) # /Gy _Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', _boolean) # /Oi _Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C _Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo _Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp _Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', _file_name) # Used with /Yc and /Yu _Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', _file_name) # /Fp _Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', _Enumeration(['NotUsing', # VS recognized '' for this value too. 'Create', # /Yc 'Use'])) # /Yu _Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX _ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') # MSVS options not found in MSBuild. _MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) _MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. _MSBuildOnly(_compile, 'BuildingInIDE', _boolean) _MSBuildOnly(_compile, 'CompileAsManaged', _Enumeration([], new=['false', 'true', # /clr 'Pure', # /clr:pure 'Safe', # /clr:safe 'OldSyntax'])) # /clr:oldSyntax _MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch _MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP _MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi _MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors _MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we _MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu # Defines a setting that needs very customized processing _CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') # Directives for converting MSVS VCLinkerTool to MSBuild Link. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for # the schema of the MSBuild Link settings. 
# Options that have the same name in MSVS and MSBuild _Same(_link, 'AdditionalDependencies', _file_list) _Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH # /MANIFESTDEPENDENCY: _Same(_link, 'AdditionalManifestDependencies', _file_list) _Same(_link, 'AdditionalOptions', _string_list) _Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE _Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION _Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE _Same(_link, 'BaseAddress', _string) # /BASE _Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK _Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD _Same(_link, 'DelaySign', _boolean) # /DELAYSIGN _Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE _Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC _Same(_link, 'EntryPointSymbol', _string) # /ENTRY _Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE _Same(_link, 'FunctionOrder', _file_name) # /ORDER _Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG _Same(_link, 'GenerateMapFile', _boolean) # /MAP _Same(_link, 'HeapCommitSize', _string) _Same(_link, 'HeapReserveSize', _string) # /HEAP _Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB _Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL _Same(_link, 'ImportLibrary', _file_name) # /IMPLIB _Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER _Same(_link, 'KeyFile', _file_name) # /KEYFILE _Same(_link, 'ManifestFile', _file_name) # /ManifestFile _Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS _Same(_link, 'MapFileName', _file_name) _Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT _Same(_link, 'MergeSections', _string) # /MERGE _Same(_link, 'MidlCommandFile', _file_name) # /MIDL _Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF _Same(_link, 'OutputFile', _file_name) # /OUT _Same(_link, 'PerUserRedirection', _boolean) _Same(_link, 'Profile', _boolean) # /PROFILE _Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD _Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB _Same(_link, 'RegisterOutput', _boolean) _Same(_link, 'SetChecksum', _boolean) # /RELEASE _Same(_link, 'StackCommitSize', _string) _Same(_link, 'StackReserveSize', _string) # /STACK _Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED _Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD _Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO _Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD _Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY _Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT _Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID _Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true' _Same(_link, 'Version', _string) # /VERSION _Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF _Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED _Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE _Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF _Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE _Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE _subsystem_enumeration = _Enumeration( ['NotSet', 'Console', # /SUBSYSTEM:CONSOLE 'Windows', # /SUBSYSTEM:WINDOWS 'Native', # /SUBSYSTEM:NATIVE 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER 'EFI ROM', # /SUBSYSTEM:EFI_ROM 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER 'WindowsCE'], 
# /SUBSYSTEM:WINDOWSCE new=['POSIX']) # /SUBSYSTEM:POSIX _target_machine_enumeration = _Enumeration( ['NotSet', 'MachineX86', # /MACHINE:X86 None, 'MachineARM', # /MACHINE:ARM 'MachineEBC', # /MACHINE:EBC 'MachineIA64', # /MACHINE:IA64 None, 'MachineMIPS', # /MACHINE:MIPS 'MachineMIPS16', # /MACHINE:MIPS16 'MachineMIPSFPU', # /MACHINE:MIPSFPU 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16 None, None, None, 'MachineSH4', # /MACHINE:SH4 None, 'MachineTHUMB', # /MACHINE:THUMB 'MachineX64']) # /MACHINE:X64 _Same(_link, 'AssemblyDebug', _Enumeration(['', 'true', # /ASSEMBLYDEBUG 'false'])) # /ASSEMBLYDEBUG:DISABLE _Same(_link, 'CLRImageType', _Enumeration(['Default', 'ForceIJWImage', # /CLRIMAGETYPE:IJW 'ForcePureILImage', # /Switch="CLRIMAGETYPE:PURE 'ForceSafeILImage'])) # /Switch="CLRIMAGETYPE:SAFE _Same(_link, 'CLRThreadAttribute', _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA _Same(_link, 'DataExecutionPrevention', _Enumeration(['', 'false', # /NXCOMPAT:NO 'true'])) # /NXCOMPAT _Same(_link, 'Driver', _Enumeration(['NotSet', 'Driver', # /Driver 'UpOnly', # /DRIVER:UPONLY 'WDM'])) # /DRIVER:WDM _Same(_link, 'LinkTimeCodeGeneration', _Enumeration(['Default', 'UseLinkTimeCodeGeneration', # /LTCG 'PGInstrument', # /LTCG:PGInstrument 'PGOptimization', # /LTCG:PGOptimize 'PGUpdate'])) # /LTCG:PGUpdate _Same(_link, 'ShowProgress', _Enumeration(['NotSet', 'LinkVerbose', # /VERBOSE 'LinkVerboseLib'], # /VERBOSE:Lib new=['LinkVerboseICF', # /VERBOSE:ICF 'LinkVerboseREF', # /VERBOSE:REF 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH 'LinkVerboseCLR'])) # /VERBOSE:CLR _Same(_link, 'SubSystem', _subsystem_enumeration) _Same(_link, 'TargetMachine', _target_machine_enumeration) _Same(_link, 'UACExecutionLevel', _Enumeration(['AsInvoker', # /level='asInvoker' 'HighestAvailable', # /level='highestAvailable' 'RequireAdministrator'])) # /level='requireAdministrator' _Same(_link, 'MinimumRequiredVersion', _string) _Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX # Options found in MSVS that have been renamed in MSBuild. _Renamed(_link, 'ErrorReporting', 'LinkErrorReporting', _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE 'PromptImmediately', # /ERRORREPORT:PROMPT 'QueueForNextLogin'], # /ERRORREPORT:QUEUE new=['SendErrorReport'])) # /ERRORREPORT:SEND _Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB _Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY _Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET _Moved(_link, 'GenerateManifest', '', _boolean) _Moved(_link, 'IgnoreImportLibrary', '', _boolean) _Moved(_link, 'LinkIncremental', '', _newly_boolean) _Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean) _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean) # MSVS options not found in MSBuild. _MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean) _MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean) # These settings generate correctly in the MSVS output files when using # e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from # configuration entries, but result in spurious artifacts which can be # safely ignored here. 
See crbug.com/246570 _MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list) _MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list) _MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list) # MSBuild options not found in MSVS. _MSBuildOnly(_link, 'BuildingInIDE', _boolean) _MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH _MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false' _MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS _MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND _MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND _MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false' _MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN _MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION _MSBuildOnly(_link, 'ForceFileOutput', _Enumeration([], new=['Enabled', # /FORCE # /FORCE:MULTIPLE 'MultiplyDefinedSymbolOnly', 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED _MSBuildOnly(_link, 'CreateHotPatchableImage', _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN 'X86Image', # /FUNCTIONPADMIN:5 'X64Image', # /FUNCTIONPADMIN:6 'ItaniumImage'])) # /FUNCTIONPADMIN:16 _MSBuildOnly(_link, 'CLRSupportLastError', _Enumeration([], new=['Enabled', # /CLRSupportLastError 'Disabled', # /CLRSupportLastError:NO # /CLRSupportLastError:SYSTEMDLL 'SystemDlls'])) # Directives for converting VCResourceCompilerTool to ResourceCompile. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for # the schema of the MSBuild ResourceCompile settings. _Same(_rc, 'AdditionalOptions', _string_list) _Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_rc, 'Culture', _Integer(msbuild_base=16)) _Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X _Same(_rc, 'PreprocessorDefinitions', _string_list) # /D _Same(_rc, 'ResourceOutputFileName', _string) # /fo _Same(_rc, 'ShowProgress', _boolean) # /v # There is no UI in VisualStudio 2008 to set the following properties. # However they are found in CL and other tools. Include them here for # completeness, as they are very likely to have the same usage pattern. _Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo _Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u # MSBuild options not found in MSVS. _MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n _MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name) # Directives for converting VCMIDLTool to Midl. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for # the schema of the MSBuild Midl settings. 
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_midl, 'AdditionalOptions', _string_list) _Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt _Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation _Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check _Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum _Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref _Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data _Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf _Same(_midl, 'GenerateTypeLibrary', _boolean) _Same(_midl, 'HeaderFileName', _file_name) # /h _Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir _Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid _Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203 _Same(_midl, 'OutputDirectory', _string) # /out _Same(_midl, 'PreprocessorDefinitions', _string_list) # /D _Same(_midl, 'ProxyFileName', _file_name) # /proxy _Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o _Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo _Same(_midl, 'TypeLibraryName', _file_name) # /tlb _Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U _Same(_midl, 'WarnAsError', _boolean) # /WX _Same(_midl, 'DefaultCharType', _Enumeration(['Unsigned', # /char unsigned 'Signed', # /char signed 'Ascii'])) # /char ascii7 _Same(_midl, 'TargetEnvironment', _Enumeration(['NotSet', 'Win32', # /env win32 'Itanium', # /env ia64 'X64'])) # /env x64 _Same(_midl, 'EnableErrorChecks', _Enumeration(['EnableCustom', 'None', # /error none 'All'])) # /error all _Same(_midl, 'StructMemberAlignment', _Enumeration(['NotSet', '1', # Zp1 '2', # Zp2 '4', # Zp4 '8'])) # Zp8 _Same(_midl, 'WarningLevel', _Enumeration(['0', # /W0 '1', # /W1 '2', # /W2 '3', # /W3 '4'])) # /W4 _Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata _Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters', _boolean) # /robust # MSBuild options not found in MSVS. _MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config _MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub _MSBuildOnly(_midl, 'GenerateClientFiles', _Enumeration([], new=['Stub', # /client stub 'None'])) # /client none _MSBuildOnly(_midl, 'GenerateServerFiles', _Enumeration([], new=['Stub', # /client stub 'None'])) # /client none _MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL _MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub _MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn _MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_midl, 'TypeLibFormat', _Enumeration([], new=['NewFormat', # /newtlb 'OldFormat'])) # /oldtlb # Directives for converting VCLibrarianTool to Lib. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for # the schema of the MSBuild Lib settings. 
_Same(_lib, 'AdditionalDependencies', _file_list) _Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH _Same(_lib, 'AdditionalOptions', _string_list) _Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT _Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE _Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB _Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB _Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF _Same(_lib, 'OutputFile', _file_name) # /OUT _Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO _Same(_lib, 'UseUnicodeResponseFiles', _boolean) _Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG _Same(_lib, 'TargetMachine', _target_machine_enumeration) # TODO(jeanluc) _link defines the same value that gets moved to # ProjectReference. We may want to validate that they are consistent. _Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean) # TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. _MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list) _MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false' _MSBuildOnly(_lib, 'ErrorReporting', _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT 'QueueForNextLogin', # /ERRORREPORT:QUEUE 'SendErrorReport', # /ERRORREPORT:SEND 'NoErrorReport'])) # /ERRORREPORT:NONE _MSBuildOnly(_lib, 'MinimumRequiredVersion', _string) _MSBuildOnly(_lib, 'Name', _file_name) # /NAME _MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE _MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration) _MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX _MSBuildOnly(_lib, 'Verbose', _boolean) # Directives for converting VCManifestTool to Mt. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for # the schema of the MSBuild Lib settings. # Options that have the same name in MSVS and MSBuild _Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest _Same(_manifest, 'AdditionalOptions', _string_list) _Same(_manifest, 'AssemblyIdentity', _string) # /identity: _Same(_manifest, 'ComponentFileName', _file_name) # /dll _Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs _Same(_manifest, 'InputResourceManifests', _string) # /inputresource _Same(_manifest, 'OutputManifestFile', _file_name) # /out _Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs _Same(_manifest, 'ReplacementsFile', _file_name) # /replacements _Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo _Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb: _Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate _Same(_manifest, 'UpdateFileHashesSearchPath', _file_name) _Same(_manifest, 'VerboseOutput', _boolean) # /verbose # Options that have moved location. _MovedAndRenamed(_manifest, 'ManifestResourceFile', 'ManifestResourceCompile', 'ResourceOutputFileName', _file_name) _Moved(_manifest, 'EmbedManifest', '', _boolean) # MSVS options not found in MSBuild. _MSVSOnly(_manifest, 'DependencyInformationFile', _file_name) _MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean) _MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. 
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean) _MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category _MSBuildOnly(_manifest, 'ManifestFromManagedAssembly', _file_name) # /managedassemblyname _MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource _MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency _MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
mit
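A small, hedged example of the conversion entry point defined in the record above. The import path is an assumption (it depends on how gyp is vendored; here it is taken to be importable as gyp.MSVSSettings), and the expected output follows from the _Same/_Renamed tables shown in the file.

from gyp import MSVSSettings  # assumed import path

msvs = {
    'VCCLCompilerTool': {
        'WarningLevel': '3',    # enumeration index 3 -> 'Level3'
        'Optimization': '2',    # enumeration index 2 -> 'MaxSpeed'
        'WarnAsError': 'true',  # renamed to TreatWarningAsError
    },
}
msbuild = MSVSSettings.ConvertToMSBuildSettings(msvs)
# Expected, per the tables above:
# {'ClCompile': {'WarningLevel': 'Level3',
#                'Optimization': 'MaxSpeed',
#                'TreatWarningAsError': 'true'}}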
tailorian/Sick-Beard
lib/bs4/builder/_lxml.py
446
8661
__all__ = [ 'LXMLTreeBuilderForXML', 'LXMLTreeBuilder', ] from io import BytesIO from StringIO import StringIO import collections from lxml import etree from bs4.element import Comment, Doctype, NamespacedAttribute from bs4.builder import ( FAST, HTML, HTMLTreeBuilder, PERMISSIVE, ParserRejectedMarkup, TreeBuilder, XML) from bs4.dammit import EncodingDetector LXML = 'lxml' class LXMLTreeBuilderForXML(TreeBuilder): DEFAULT_PARSER_CLASS = etree.XMLParser is_xml = True # Well, it's permissive by XML parser standards. features = [LXML, XML, FAST, PERMISSIVE] CHUNK_SIZE = 512 # This namespace mapping is specified in the XML Namespace # standard. DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"} def default_parser(self, encoding): # This can either return a parser object or a class, which # will be instantiated with default arguments. if self._default_parser is not None: return self._default_parser return etree.XMLParser( target=self, strip_cdata=False, recover=True, encoding=encoding) def parser_for(self, encoding): # Use the default parser. parser = self.default_parser(encoding) if isinstance(parser, collections.Callable): # Instantiate the parser with default arguments parser = parser(target=self, strip_cdata=False, encoding=encoding) return parser def __init__(self, parser=None, empty_element_tags=None): # TODO: Issue a warning if parser is present but not a # callable, since that means there's no way to create new # parsers for different encodings. self._default_parser = parser if empty_element_tags is not None: self.empty_element_tags = set(empty_element_tags) self.soup = None self.nsmaps = [self.DEFAULT_NSMAPS] def _getNsTag(self, tag): # Split the namespace URL out of a fully-qualified lxml tag # name. Copied from lxml's src/lxml/sax.py. if tag[0] == '{': return tuple(tag[1:].split('}', 1)) else: return (None, tag) def prepare_markup(self, markup, user_specified_encoding=None, document_declared_encoding=None): """ :yield: A series of 4-tuples. (markup, encoding, declared encoding, has undergone character replacement) Each 4-tuple represents a strategy for parsing the document. """ if isinstance(markup, unicode): # We were given Unicode. Maybe lxml can parse Unicode on # this system? yield markup, None, document_declared_encoding, False if isinstance(markup, unicode): # No, apparently not. Convert the Unicode to UTF-8 and # tell lxml to parse it as UTF-8. yield (markup.encode("utf8"), "utf8", document_declared_encoding, False) # Instead of using UnicodeDammit to convert the bytestring to # Unicode using different encodings, use EncodingDetector to # iterate over the encodings, and tell lxml to try to parse # the document as each one in turn. is_html = not self.is_xml try_encodings = [user_specified_encoding, document_declared_encoding] detector = EncodingDetector(markup, try_encodings, is_html) for encoding in detector.encodings: yield (detector.markup, encoding, document_declared_encoding, False) def feed(self, markup): if isinstance(markup, bytes): markup = BytesIO(markup) elif isinstance(markup, unicode): markup = StringIO(markup) # Call feed() at least once, even if the markup is empty, # or the parser won't be initialized. data = markup.read(self.CHUNK_SIZE) try: self.parser = self.parser_for(self.soup.original_encoding) self.parser.feed(data) while len(data) != 0: # Now call feed() on the rest of the data, chunk by chunk. 
data = markup.read(self.CHUNK_SIZE) if len(data) != 0: self.parser.feed(data) self.parser.close() except (UnicodeDecodeError, LookupError, etree.ParserError), e: raise ParserRejectedMarkup(str(e)) def close(self): self.nsmaps = [self.DEFAULT_NSMAPS] def start(self, name, attrs, nsmap={}): # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. attrs = dict(attrs) nsprefix = None # Invert each namespace map as it comes in. if len(self.nsmaps) > 1: # There are no new namespaces for this tag, but # non-default namespaces are in play, so we need a # separate tag stack to know when they end. self.nsmaps.append(None) elif len(nsmap) > 0: # A new namespace mapping has come into play. inverted_nsmap = dict((value, key) for key, value in nsmap.items()) self.nsmaps.append(inverted_nsmap) # Also treat the namespace mapping as a set of attributes on the # tag, so we can recreate it later. attrs = attrs.copy() for prefix, namespace in nsmap.items(): attribute = NamespacedAttribute( "xmlns", prefix, "http://www.w3.org/2000/xmlns/") attrs[attribute] = namespace # Namespaces are in play. Find any attributes that came in # from lxml with namespaces attached to their names, and # turn then into NamespacedAttribute objects. new_attrs = {} for attr, value in attrs.items(): namespace, attr = self._getNsTag(attr) if namespace is None: new_attrs[attr] = value else: nsprefix = self._prefix_for_namespace(namespace) attr = NamespacedAttribute(nsprefix, attr, namespace) new_attrs[attr] = value attrs = new_attrs namespace, name = self._getNsTag(name) nsprefix = self._prefix_for_namespace(namespace) self.soup.handle_starttag(name, namespace, nsprefix, attrs) def _prefix_for_namespace(self, namespace): """Find the currently active prefix for the given namespace.""" if namespace is None: return None for inverted_nsmap in reversed(self.nsmaps): if inverted_nsmap is not None and namespace in inverted_nsmap: return inverted_nsmap[namespace] return None def end(self, name): self.soup.endData() completed_tag = self.soup.tagStack[-1] namespace, name = self._getNsTag(name) nsprefix = None if namespace is not None: for inverted_nsmap in reversed(self.nsmaps): if inverted_nsmap is not None and namespace in inverted_nsmap: nsprefix = inverted_nsmap[namespace] break self.soup.handle_endtag(name, nsprefix) if len(self.nsmaps) > 1: # This tag, or one of its parents, introduced a namespace # mapping, so pop it off the stack. self.nsmaps.pop() def pi(self, target, data): pass def data(self, content): self.soup.handle_data(content) def doctype(self, name, pubid, system): self.soup.endData() doctype = Doctype.for_name_and_ids(name, pubid, system) self.soup.object_was_parsed(doctype) def comment(self, content): "Handle comments as Comment objects." self.soup.endData() self.soup.handle_data(content) self.soup.endData(Comment) def test_fragment_to_document(self, fragment): """See `TreeBuilder`.""" return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): features = [LXML, HTML, FAST, PERMISSIVE] is_xml = False def default_parser(self, encoding): return etree.HTMLParser def feed(self, markup): encoding = self.soup.original_encoding try: self.parser = self.parser_for(encoding) self.parser.feed(markup) self.parser.close() except (UnicodeDecodeError, LookupError, etree.ParserError), e: raise ParserRejectedMarkup(str(e)) def test_fragment_to_document(self, fragment): """See `TreeBuilder`.""" return u'<html><body>%s</body></html>' % fragment
gpl-3.0
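A minimal sketch of how these builders are normally reached through the BeautifulSoup constructor, assuming bs4 and lxml are installed; the feature strings 'lxml' and 'xml' come from the features lists in the record above.

from bs4 import BeautifulSoup

# HTML markup is routed to LXMLTreeBuilder (registered under 'lxml').
soup = BeautifulSoup('<p class="x">hi</p>', 'lxml')
print(soup.p['class'])   # ['x']

# XML markup is routed to LXMLTreeBuilderForXML (registered under 'xml').
doc = BeautifulSoup('<root><item/></root>', 'xml')
print(doc.root.item)     # <item/>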
gurneyalex/OpenUpgrade
addons/report_intrastat/report_intrastat.py
39
5615
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv from openerp.tools.sql import drop_view_if_exists from openerp.addons.decimal_precision import decimal_precision as dp class res_country(osv.osv): _name = 'res.country' _inherit = 'res.country' _columns = { 'intrastat': fields.boolean('Intrastat member'), } _defaults = { 'intrastat': lambda *a: False, } class report_intrastat_code(osv.osv): _name = "report.intrastat.code" _description = "Intrastat code" _columns = { 'name': fields.char('Intrastat Code', size=16), 'description': fields.char('Description', size=64), } class product_template(osv.osv): _name = "product.template" _inherit = "product.template" _columns = { 'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat code'), } class report_intrastat(osv.osv): _name = "report.intrastat" _description = "Intrastat report" _auto = False _columns = { 'name': fields.char('Year',size=64,required=False, readonly=True), 'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True), 'supply_units':fields.float('Supply Units', readonly=True), 'ref':fields.char('Source document',size=64, readonly=True), 'code': fields.char('Country code', size=2, readonly=True), 'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat code', readonly=True), 'weight': fields.float('Weight', readonly=True), 'value': fields.float('Value', readonly=True, digits_compute=dp.get_precision('Account')), 'type': fields.selection([('import', 'Import'), ('export', 'Export')], 'Type'), 'currency_id': fields.many2one('res.currency', "Currency", readonly=True), } def init(self, cr): drop_view_if_exists(cr, 'report_intrastat') cr.execute(""" create or replace view report_intrastat as ( select to_char(inv.create_date, 'YYYY') as name, to_char(inv.create_date, 'MM') as month, min(inv_line.id) as id, intrastat.id as intrastat_id, upper(inv_country.code) as code, sum(case when inv_line.price_unit is not null then inv_line.price_unit * inv_line.quantity else 0 end) as value, sum( case when uom.category_id != puom.category_id then (pt.weight_net * inv_line.quantity) else (pt.weight_net * inv_line.quantity * uom.factor) end ) as weight, sum( case when uom.category_id != puom.category_id then inv_line.quantity else (inv_line.quantity * uom.factor) end ) as supply_units, inv.currency_id as currency_id, inv.number as ref, case when inv.type in ('out_invoice','in_refund') then 'export' else 'import' end as type from 
account_invoice inv left join account_invoice_line inv_line on inv_line.invoice_id=inv.id left join (product_template pt left join product_product pp on (pp.product_tmpl_id = pt.id)) on (inv_line.product_id = pp.id) left join product_uom uom on uom.id=inv_line.uos_id left join product_uom puom on puom.id = pt.uom_id left join report_intrastat_code intrastat on pt.intrastat_id = intrastat.id left join (res_partner inv_address left join res_country inv_country on (inv_country.id = inv_address.country_id)) on (inv_address.id = inv.partner_id) where inv.state in ('open','paid') and inv_line.product_id is not null and inv_country.intrastat=true group by to_char(inv.create_date, 'YYYY'), to_char(inv.create_date, 'MM'),intrastat.id,inv.type,pt.intrastat_id, inv_country.code,inv.number, inv.currency_id )""") # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Tilo15/PhotoFiddle2
PF2/Tools/HueEqualiser.py
1
5526
import cv2 import numpy import Tool class HueEqualiser(Tool.Tool): def on_init(self): self.id = "hueequaliser" self.name = "Hue Equaliser" self.icon_path = "ui/PF2_Icons/HueEqualiser.png" self.properties = [ Tool.Property("header", "Hue Equaliser", "Header", None, has_toggle=False, has_button=False), Tool.Property("bleed", "Hue Bleed", "Slider", 0.5, max=2.0, min=0.01), Tool.Property("neighbour_bleed", "Neighbour Bleed", "Slider", 0.25, max=2.0, min=0.0), # Red Tool.Property("header_red", "Red", "Header", None, has_toggle=False, has_button=False), Tool.Property("red_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("red_saturation", "Saturation", "Slider", 0, max=50, min=-50), # Yellow Tool.Property("header_yellow", "Yellow", "Header", None, has_toggle=False, has_button=False), Tool.Property("yellow_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("yellow_saturation", "Saturation", "Slider", 0, max=50, min=-50), # Green Tool.Property("header_green", "Green", "Header", None, has_toggle=False, has_button=False), Tool.Property("green_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("green_saturation", "Saturation", "Slider", 0, max=50, min=-50), # Cyan Tool.Property("header_cyan", "Cyan", "Header", None, has_toggle=False, has_button=False), Tool.Property("cyan_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("cyan_saturation", "Saturation", "Slider", 0, max=50, min=-50), # Blue Tool.Property("header_blue", "Blue", "Header", None, has_toggle=False, has_button=False), Tool.Property("blue_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("blue_saturation", "Saturation", "Slider", 0, max=50, min=-50), # Violet Tool.Property("header_violet", "Violet", "Header", None, has_toggle=False, has_button=False), Tool.Property("violet_value", "Value", "Slider", 0, max=50, min=-50), Tool.Property("violet_saturation", "Saturation", "Slider", 0, max=50, min=-50), ] def on_update(self, image): hues = { "red": 0, "yellow": 60, "green": 120, "cyan": 180, "blue": 240, "violet": 300, "_red": 360, } out = image if(not self.is_default()): bleed = self.props["bleed"].get_value() neighbour_bleed = self.props["neighbour_bleed"].get_value() out = out.astype(numpy.float32) # Convert to HSV colorspace out = cv2.cvtColor(out, cv2.COLOR_BGR2HSV) # Bits per pixel bpp = float(str(image.dtype).replace("uint", "").replace("float", "")) # Pixel value range np = float(2 ** bpp - 1) imhue = out[0:, 0:, 0] imsat = out[0:, 0:, 1] imval = out[0:, 0:, 2] for hue in hues: hsat = self.props["%s_saturation" % hue.replace('_', '')].get_value() hval = self.props["%s_value" % hue.replace('_', '')].get_value() isHue = self._is_hue(imhue, hues[hue], (3.5/bleed)) isHue = self._neighbour_bleed(isHue, neighbour_bleed) imsat = imsat + ((hsat / 10000) * 255) * isHue imval = imval + ((hval / 1000) * np) * isHue # Clip any values out of bounds imval[imval < 0.0] = 0.0 imval[imval > np] = np imsat[imsat < 0.0] = 0.0 imsat[imsat > 1.0] = 1.0 out[0:, 0:, 1] = imsat out[0:, 0:, 2] = imval # Convert back to BGR colorspace out = cv2.cvtColor(out, cv2.COLOR_HSV2BGR) out = out.astype(image.dtype) return out def _is_hue(self, image, hue_value, bleed_value = 3.5): mif = hue_value - 30 mir = hue_value + 30 if (mir > 360): mir = 360 if (mif < 0): mif = 0 bleed = float(360 / bleed_value) icopy = image.copy() print(bleed, mif, mir) if(mif != 0): icopy[icopy < mif - bleed] = 0.0 icopy[icopy > mir + bleed] = 0.0 icopy[(icopy < mif) * (icopy != 0.0)] = (((mif - (icopy[(icopy < mif) * (icopy != 
0.0)]))/360.0) / (bleed/360.0)) * -1 + 1 icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir)/360.0) / (bleed/360.0)) * -1 + 1 icopy[(icopy >= mif) * (icopy <= mir)] = 1.0 if(mif == 0): icopy[icopy > mir + bleed] = 0.0 icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir) / 360.0) / (bleed/360.0)) * -1 + 1 return icopy def _neighbour_bleed(self, map, bleed): strength = bleed*30 if (strength > 0): height, width = map.shape[:2] size = (height * width) mul = numpy.math.sqrt(size) / 1064.416 # numpy.math.sqrt(1132982.0) map = map*255 blur_size = abs(2 * round((round(strength * mul) + 1) / 2) - 1) im = cv2.blur(map, (int(blur_size), int(blur_size))) return im/255.0 return map
gpl-3.0
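The core idea in _is_hue above is a per-pixel weight that is 1 inside a +/-30 degree band around the target hue and falls off linearly over a bleed-controlled margin. The sketch below is a simplified standalone reformulation of that idea on plain numpy data; it is my own formula (no hue wrap-around, different default falloff), not the class's exact code.

import numpy as np

def hue_weight(hue_deg, centre, half_width=30.0, falloff=60.0):
    # 1.0 inside [centre - half_width, centre + half_width],
    # ramping linearly down to 0.0 over `falloff` degrees outside that band.
    lo, hi = centre - half_width, centre + half_width
    below = np.clip((hue_deg - (lo - falloff)) / falloff, 0.0, 1.0)
    above = np.clip(((hi + falloff) - hue_deg) / falloff, 0.0, 1.0)
    return np.minimum(below, above)

hues = np.array([0.0, 60.0, 120.0, 180.0, 240.0])
print(hue_weight(hues, centre=120.0))   # ~[0.  0.5  1.  0.5  0.]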
OCA/sale-workflow
sale_product_set/wizard/product_set_add.py
1
3428
# Copyright 2015 Anybox S.A.S # Copyright 2016-2018 Camptocamp SA # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo import models, fields, api, exceptions, _ import odoo.addons.decimal_precision as dp class ProductSetAdd(models.TransientModel): _name = 'product.set.add' _rec_name = 'product_set_id' _description = "Wizard model to add product set into a quotation" order_id = fields.Many2one( 'sale.order', 'Sale Order', required=True, default=lambda self: self.env.context.get('active_id'), ondelete='cascade' ) partner_id = fields.Many2one( related='order_id.partner_id', ondelete='cascade' ) product_set_id = fields.Many2one( 'product.set', 'Product set', required=True, ondelete='cascade' ) quantity = fields.Float( digits=dp.get_precision('Product Unit of Measure'), required=True, default=1) skip_existing_products = fields.Boolean( default=False, help='Enable this to not add new lines ' 'for products already included in SO lines.' ) def _check_partner(self): if self.product_set_id.partner_id: if self.product_set_id.partner_id != self.order_id.partner_id: raise exceptions.ValidationError(_( "Select a product set assigned to " "the same partner of the order." )) @api.multi def add_set(self): """ Add product set, multiplied by quantity in sale order line """ self._check_partner() order_lines = self._prepare_order_lines() if order_lines: self.order_id.write({ "order_line": order_lines }) return order_lines def _prepare_order_lines(self): max_sequence = self._get_max_sequence() order_lines = [] for set_line in self._get_lines(): order_lines.append( (0, 0, self.prepare_sale_order_line_data( set_line, max_sequence=max_sequence)) ) return order_lines def _get_max_sequence(self): max_sequence = 0 if self.order_id.order_line: max_sequence = max([ line.sequence for line in self.order_id.order_line ]) return max_sequence def _get_lines(self): # hook here to take control on used lines so_product_ids = self.order_id.order_line.mapped('product_id').ids for set_line in self.product_set_id.set_line_ids: if (self.skip_existing_products and set_line.product_id.id in so_product_ids): continue yield set_line @api.multi def prepare_sale_order_line_data(self, set_line, max_sequence=0): self.ensure_one() sale_line = self.env['sale.order.line'].new({ 'order_id': self.order_id.id, 'product_id': set_line.product_id.id, 'product_uom_qty': set_line.quantity * self.quantity, 'product_uom': set_line.product_id.uom_id.id, 'sequence': max_sequence + set_line.sequence, 'discount': set_line.discount, }) sale_line.product_id_change() line_values = sale_line._convert_to_write(sale_line._cache) return line_values
agpl-3.0
MjAbuz/django-social-auth
setup.py
3
2193
# -*- coding: utf-8 -*-
"""Setup file for easy installation"""
from os.path import join, dirname
from setuptools import setup

version = __import__('social_auth').__version__

LONG_DESCRIPTION = """
Django Social Auth is an easy to setup social authentication/registration
mechanism for Django projects.

Crafted using base code from django-twitter-oauth_ and django-openid-auth_,
implements a common interface to define new authentication providers from
third parties.
"""


def long_description():
    """Return long description from README.rst if it's present
    because it doesn't get installed."""
    try:
        return open(join(dirname(__file__), 'README.rst')).read()
    except IOError:
        return LONG_DESCRIPTION


setup(name='django-social-auth',
      version=version,
      author='Matías Aguirre',
      author_email='[email protected]',
      description='Django social authentication made simple.',
      license='BSD',
      keywords='django, openid, oauth, social auth, application',
      url='https://github.com/omab/django-social-auth',
      packages=['social_auth',
                'social_auth.management',
                'social_auth.management.commands',
                'social_auth.backends',
                'social_auth.backends.contrib',
                'social_auth.backends.pipeline',
                'social_auth.migrations',
                'social_auth.tests',
                'social_auth.db'],
      package_data={'social_auth': ['locale/*/LC_MESSAGES/*']},
      long_description=long_description(),
      install_requires=['django>=1.2.5',
                        'oauth2>=1.5.167',
                        'python_openid>=2.2'],
      classifiers=['Framework :: Django',
                   'Development Status :: 4 - Beta',
                   'Topic :: Internet',
                   'License :: OSI Approved :: BSD License',
                   'Intended Audience :: Developers',
                   'Environment :: Web Environment',
                   'Programming Language :: Python :: 2.5',
                   'Programming Language :: Python :: 2.6',
                   'Programming Language :: Python :: 2.7'],
      zip_safe=False)
bsd-3-clause
xfumihiro/powerline
powerline/lint/markedjson/error.py
33
6948
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import sys import re from powerline.lib.encoding import get_preferred_output_encoding NON_PRINTABLE_STR = ( '[^' # ASCII control characters: 0x00-0x19 + '\t\n' # Tab, newline: allowed ASCII control characters + '\x20-\x7E' # ASCII printable characters # Unicode control characters: 0x7F-0x9F + '\u0085' # Allowed unicode control character: next line character + '\u00A0-\uD7FF' # Surrogate escapes: 0xD800-0xDFFF + '\uE000-\uFFFD' + (( '\uD800-\uDFFF' ) if sys.maxunicode < 0x10FFFF else ( '\U00010000-\U0010FFFF' )) + ']' + (( # Paired surrogate escapes: allowed in UCS-2 builds as the only way to # represent characters above 0xFFFF. Only paired variant is allowed. '|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF]' + '|[\uD800-\uDBFF](?![\uDC00-\uDFFF])' ) if sys.maxunicode < 0x10FFFF else ( '' )) ) NON_PRINTABLE_RE = re.compile(NON_PRINTABLE_STR) def repl(s): return '<x%04x>' % ord(s.group()) def strtrans(s): return NON_PRINTABLE_RE.sub(repl, s.replace('\t', '>---')) class Mark: def __init__(self, name, line, column, buffer, pointer, old_mark=None, merged_marks=None): self.name = name self.line = line self.column = column self.buffer = buffer self.pointer = pointer self.old_mark = old_mark self.merged_marks = merged_marks or [] def copy(self): return Mark(self.name, self.line, self.column, self.buffer, self.pointer, self.old_mark, self.merged_marks[:]) def get_snippet(self, indent=4, max_length=75): if self.buffer is None: return None head = '' start = self.pointer while start > 0 and self.buffer[start - 1] not in '\0\n': start -= 1 if self.pointer - start > max_length / 2 - 1: head = ' ... ' start += 5 break tail = '' end = self.pointer while end < len(self.buffer) and self.buffer[end] not in '\0\n': end += 1 if end - self.pointer > max_length / 2 - 1: tail = ' ... ' end -= 5 break snippet = [self.buffer[start:self.pointer], self.buffer[self.pointer], self.buffer[self.pointer + 1:end]] snippet = [strtrans(s) for s in snippet] return ( ' ' * indent + head + ''.join(snippet) + tail + '\n' + ' ' * (indent + len(head) + len(snippet[0])) + '^' ) def advance_string(self, diff): ret = self.copy() # FIXME Currently does not work properly with escaped strings. 
ret.column += diff ret.pointer += diff return ret def set_old_mark(self, old_mark): if self is old_mark: return checked_marks = set([id(self)]) older_mark = old_mark while True: if id(older_mark) in checked_marks: raise ValueError('Trying to set recursive marks') checked_marks.add(id(older_mark)) older_mark = older_mark.old_mark if not older_mark: break self.old_mark = old_mark def set_merged_mark(self, merged_mark): self.merged_marks.append(merged_mark) def to_string(self, indent=0, head_text='in ', add_snippet=True): mark = self where = '' processed_marks = set() while mark: indentstr = ' ' * indent where += ('%s %s"%s", line %d, column %d' % ( indentstr, head_text, mark.name, mark.line + 1, mark.column + 1)) if add_snippet: snippet = mark.get_snippet(indent=(indent + 4)) if snippet: where += ':\n' + snippet if mark.merged_marks: where += '\n' + indentstr + ' with additionally merged\n' where += mark.merged_marks[0].to_string(indent + 4, head_text='', add_snippet=False) for mmark in mark.merged_marks[1:]: where += '\n' + indentstr + ' and\n' where += mmark.to_string(indent + 4, head_text='', add_snippet=False) if add_snippet: processed_marks.add(id(mark)) if mark.old_mark: where += '\n' + indentstr + ' which replaced value\n' indent += 4 mark = mark.old_mark if id(mark) in processed_marks: raise ValueError('Trying to dump recursive mark') return where if sys.version_info < (3,): def __str__(self): return self.to_string().encode('utf-8') def __unicode__(self): return self.to_string() else: def __str__(self): return self.to_string() def __eq__(self, other): return self is other or ( self.name == other.name and self.line == other.line and self.column == other.column ) if sys.version_info < (3,): def echoerr(**kwargs): stream = kwargs.pop('stream', sys.stderr) stream.write('\n') stream.write((format_error(**kwargs) + '\n').encode(get_preferred_output_encoding())) else: def echoerr(**kwargs): stream = kwargs.pop('stream', sys.stderr) stream.write('\n') stream.write(format_error(**kwargs) + '\n') def format_error(context=None, context_mark=None, problem=None, problem_mark=None, note=None, indent=0): lines = [] indentstr = ' ' * indent if context is not None: lines.append(indentstr + context) if ( context_mark is not None and ( problem is None or problem_mark is None or context_mark != problem_mark ) ): lines.append(context_mark.to_string(indent=indent)) if problem is not None: lines.append(indentstr + problem) if problem_mark is not None: lines.append(problem_mark.to_string(indent=indent)) if note is not None: lines.append(indentstr + note) return '\n'.join(lines) class MarkedError(Exception): def __init__(self, context=None, context_mark=None, problem=None, problem_mark=None, note=None): Exception.__init__(self, format_error(context, context_mark, problem, problem_mark, note)) class EchoErr(object): __slots__ = ('echoerr', 'logger', 'indent') def __init__(self, echoerr, logger, indent=0): self.echoerr = echoerr self.logger = logger self.indent = indent def __call__(self, **kwargs): kwargs = kwargs.copy() kwargs.setdefault('indent', self.indent) self.echoerr(**kwargs) class DelayedEchoErr(EchoErr): __slots__ = ('echoerr', 'logger', 'errs', 'message', 'separator_message', 'indent', 'indent_shift') def __init__(self, echoerr, message='', separator_message=''): super(DelayedEchoErr, self).__init__(echoerr, echoerr.logger) self.errs = [[]] self.message = message self.separator_message = separator_message self.indent_shift = (4 if message or separator_message else 0) self.indent = echoerr.indent + 
self.indent_shift def __call__(self, **kwargs): kwargs = kwargs.copy() kwargs['indent'] = kwargs.get('indent', 0) + self.indent self.errs[-1].append(kwargs) def next_variant(self): self.errs.append([]) def echo_all(self): if self.message: self.echoerr(problem=self.message, indent=(self.indent - self.indent_shift)) for variant in self.errs: if not variant: continue if self.separator_message and variant is not self.errs[0]: self.echoerr(problem=self.separator_message, indent=(self.indent - self.indent_shift)) for kwargs in variant: self.echoerr(**kwargs) def __nonzero__(self): return not not self.errs __bool__ = __nonzero__
mit
pbrazdil/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/servers/rebaselineserver_unittest.py
119
12052
# Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import json import unittest2 as unittest from webkitpy.common.net import resultsjsonparser_unittest from webkitpy.common.host_mock import MockHost from webkitpy.layout_tests.layout_package.json_results_generator import strip_json_wrapper from webkitpy.port.base import Port from webkitpy.tool.commands.rebaselineserver import TestConfig, RebaselineServer from webkitpy.tool.servers import rebaselineserver class RebaselineTestTest(unittest.TestCase): def test_text_rebaseline_update(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', 'platform/mac/fast/text-expected.txt', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='none', expected_success=True, expected_log=[ 'Rebaselining fast/text...', ' Updating baselines for mac', ' Updated text-expected.txt', ]) def test_text_rebaseline_new(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='none', expected_success=True, expected_log=[ 'Rebaselining fast/text...', ' Updating baselines for mac', ' Updated text-expected.txt', ]) def test_text_rebaseline_move_no_op_1(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', 'platform/win/fast/text-expected.txt', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='mac-leopard', expected_success=True, expected_log=[ 'Rebaselining fast/text...', ' Updating baselines for mac', ' Updated text-expected.txt', ]) def test_text_rebaseline_move_no_op_2(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', 'platform/mac/fast/text-expected.checksum', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='mac-leopard', expected_success=True, expected_log=[ 'Rebaselining fast/text...', ' Moving current mac baselines to mac-leopard', ' No current baselines to move', ' 
Updating baselines for mac', ' Updated text-expected.txt', ]) def test_text_rebaseline_move(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', 'platform/mac/fast/text-expected.txt', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='mac-leopard', expected_success=True, expected_log=[ 'Rebaselining fast/text...', ' Moving current mac baselines to mac-leopard', ' Moved text-expected.txt', ' Updating baselines for mac', ' Updated text-expected.txt', ]) def test_text_rebaseline_move_only_images(self): self._assertRebaseline( test_files=( 'fast/image-expected.txt', 'platform/mac/fast/image-expected.txt', 'platform/mac/fast/image-expected.png', 'platform/mac/fast/image-expected.checksum', ), results_files=( 'fast/image-actual.png', 'fast/image-actual.checksum', ), test_name='fast/image.html', baseline_target='mac', baseline_move_to='mac-leopard', expected_success=True, expected_log=[ 'Rebaselining fast/image...', ' Moving current mac baselines to mac-leopard', ' Moved image-expected.checksum', ' Moved image-expected.png', ' Updating baselines for mac', ' Updated image-expected.checksum', ' Updated image-expected.png', ]) def test_text_rebaseline_move_already_exist(self): self._assertRebaseline( test_files=( 'fast/text-expected.txt', 'platform/mac-leopard/fast/text-expected.txt', 'platform/mac/fast/text-expected.txt', ), results_files=( 'fast/text-actual.txt', ), test_name='fast/text.html', baseline_target='mac', baseline_move_to='mac-leopard', expected_success=False, expected_log=[ 'Rebaselining fast/text...', ' Moving current mac baselines to mac-leopard', ' Already had baselines in mac-leopard, could not move existing mac ones', ]) def test_image_rebaseline(self): self._assertRebaseline( test_files=( 'fast/image-expected.txt', 'platform/mac/fast/image-expected.png', 'platform/mac/fast/image-expected.checksum', ), results_files=( 'fast/image-actual.png', 'fast/image-actual.checksum', ), test_name='fast/image.html', baseline_target='mac', baseline_move_to='none', expected_success=True, expected_log=[ 'Rebaselining fast/image...', ' Updating baselines for mac', ' Updated image-expected.checksum', ' Updated image-expected.png', ]) def test_gather_baselines(self): example_json = resultsjsonparser_unittest.ResultsJSONParserTest._example_full_results_json results_json = json.loads(strip_json_wrapper(example_json)) server = RebaselineServer() server._test_config = get_test_config() server._gather_baselines(results_json) self.assertEqual(results_json['tests']['svg/dynamic-updates/SVGFEDropShadowElement-dom-stdDeviation-attr.html']['state'], 'needs_rebaseline') self.assertNotIn('prototype-chocolate.html', results_json['tests']) def _assertRebaseline(self, test_files, results_files, test_name, baseline_target, baseline_move_to, expected_success, expected_log): log = [] test_config = get_test_config(test_files, results_files) success = rebaselineserver._rebaseline_test( test_name, baseline_target, baseline_move_to, test_config, log=lambda l: log.append(l)) self.assertEqual(expected_log, log) self.assertEqual(expected_success, success) class GetActualResultFilesTest(unittest.TestCase): def test(self): test_config = get_test_config(result_files=( 'fast/text-actual.txt', 'fast2/text-actual.txt', 'fast/text2-actual.txt', 'fast/text-notactual.txt', )) self.assertItemsEqual( ('text-actual.txt',), rebaselineserver._get_actual_result_files( 'fast/text.html', test_config)) class GetBaselinesTest(unittest.TestCase): def 
test_no_baselines(self): self._assertBaselines( test_files=(), test_name='fast/missing.html', expected_baselines={}) def test_text_baselines(self): self._assertBaselines( test_files=( 'fast/text-expected.txt', 'platform/mac/fast/text-expected.txt', ), test_name='fast/text.html', expected_baselines={ 'mac': {'.txt': True}, 'base': {'.txt': False}, }) def test_image_and_text_baselines(self): self._assertBaselines( test_files=( 'fast/image-expected.txt', 'platform/mac/fast/image-expected.png', 'platform/mac/fast/image-expected.checksum', 'platform/win/fast/image-expected.png', 'platform/win/fast/image-expected.checksum', ), test_name='fast/image.html', expected_baselines={ 'base': {'.txt': True}, 'mac': {'.checksum': True, '.png': True}, 'win': {'.checksum': False, '.png': False}, }) def test_extra_baselines(self): self._assertBaselines( test_files=( 'fast/text-expected.txt', 'platform/nosuchplatform/fast/text-expected.txt', ), test_name='fast/text.html', expected_baselines={'base': {'.txt': True}}) def _assertBaselines(self, test_files, test_name, expected_baselines): actual_baselines = rebaselineserver.get_test_baselines(test_name, get_test_config(test_files)) self.assertEqual(expected_baselines, actual_baselines) def get_test_config(test_files=[], result_files=[]): # We could grab this from port.layout_tests_dir(), but instantiating a fully mocked port is a pain. layout_tests_directory = "/mock-checkout/LayoutTests" results_directory = '/WebKitBuild/Debug/layout-test-results' host = MockHost() for file in test_files: host.filesystem.write_binary_file(host.filesystem.join(layout_tests_directory, file), '') for file in result_files: host.filesystem.write_binary_file(host.filesystem.join(results_directory, file), '') class TestMacPort(Port): port_name = "mac" return TestConfig( TestMacPort(host, 'mac'), layout_tests_directory, results_directory, ('mac', 'mac-leopard', 'win', 'linux'), host.filesystem, host.scm())
bsd-3-clause
alexax66/CM13_kernel_serranodsxx
scripts/gcc-wrapper.py
234
4095
#! /usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2012, The Linux Foundation. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of The Linux Foundation nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # Invoke gcc, looking for warnings, and causing a failure if there are # non-whitelisted warnings. import errno import re import os import sys import subprocess # Note that gcc uses unicode, which may depend on the locale. TODO: # force LANG to be set to en_US.UTF-8 to get consistent warnings. allowed_warnings = set([ "alignment.c:327", "mmu.c:602", "return_address.c:62", "swab.h:49", "SemaLambda.cpp:946", "CGObjCGNU.cpp:1414", "BugReporter.h:146", "RegionStore.cpp:1904", "SymbolManager.cpp:484", "RewriteObjCFoundationAPI.cpp:737", "RewriteObjCFoundationAPI.cpp:696", "CommentParser.cpp:394", "CommentParser.cpp:391", "CommentParser.cpp:356", "LegalizeDAG.cpp:3646", "IRBuilder.h:844", "DataLayout.cpp:193", "transport.c:653", "xt_socket.c:307", "xt_socket.c:161", "inet_hashtables.h:356", "xc4000.c:1049", "xc4000.c:1063", "f_qdss.c:586", "mipi_tc358764_dsi2lvds.c:746", "dynamic_debug.h:75", "hci_conn.c:407", "f_qdss.c:740", "mipi_novatek.c:569", "swab.h:34", ]) # Capture the name of the object file, can find it. ofile = None warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''') def interpret_warning(line): """Decode the message from gcc. The messages we care about have a filename, and a warning""" line = line.rstrip('\n') m = warning_re.match(line) if m and m.group(2) not in allowed_warnings: print "error, forbidden warning:", m.group(2) # If there is a warning, remove any object if it exists. if ofile: try: os.remove(ofile) except OSError: pass sys.exit(1) def run_gcc(): args = sys.argv[1:] # Look for -o try: i = args.index('-o') global ofile ofile = args[i+1] except (ValueError, IndexError): pass compiler = sys.argv[0] try: proc = subprocess.Popen(args, stderr=subprocess.PIPE) for line in proc.stderr: print line, interpret_warning(line) result = proc.wait() except OSError as e: result = e.errno if result == errno.ENOENT: print args[0] + ':',e.strerror print 'Is your PATH set correctly?' 
else: print ' '.join(args), str(e) return result if __name__ == '__main__': status = run_gcc() sys.exit(status)
gpl-2.0
askeing/servo
tests/wpt/web-platform-tests/tools/third_party/pluggy/testing/test_details.py
43
2753
import warnings

import pytest

from pluggy import PluginManager, HookimplMarker, HookspecMarker, _Result

hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")


def test_parse_hookimpl_override():
    class MyPluginManager(PluginManager):
        def parse_hookimpl_opts(self, module_or_class, name):
            opts = PluginManager.parse_hookimpl_opts(
                self, module_or_class, name)
            if opts is None:
                if name.startswith("x1"):
                    opts = {}
            return opts

    class Plugin(object):
        def x1meth(self):
            pass

        @hookimpl(hookwrapper=True, tryfirst=True)
        def x1meth2(self):
            pass

    class Spec(object):
        @hookspec
        def x1meth(self):
            pass

        @hookspec
        def x1meth2(self):
            pass

    pm = MyPluginManager(hookspec.project_name)
    pm.register(Plugin())
    pm.add_hookspecs(Spec)
    assert not pm.hook.x1meth._nonwrappers[0].hookwrapper
    assert not pm.hook.x1meth._nonwrappers[0].tryfirst
    assert not pm.hook.x1meth._nonwrappers[0].trylast
    assert not pm.hook.x1meth._nonwrappers[0].optionalhook
    assert pm.hook.x1meth2._wrappers[0].tryfirst
    assert pm.hook.x1meth2._wrappers[0].hookwrapper


def test_plugin_getattr_raises_errors():
    """Pluggy must be able to handle plugins which raise weird exceptions
    when getattr() gets called (#11).
    """
    class DontTouchMe(object):
        def __getattr__(self, x):
            raise Exception('cant touch me')

    class Module(object):
        pass

    module = Module()
    module.x = DontTouchMe()

    pm = PluginManager(hookspec.project_name)
    # register() would raise an error
    pm.register(module, 'donttouch')
    assert pm.get_plugin('donttouch') is module


def test_warning_on_call_vs_hookspec_arg_mismatch():
    """Verify that is a hook is called with less arguments then defined in the
    spec that a warning is emitted.
    """
    class Spec:
        @hookspec
        def myhook(self, arg1, arg2):
            pass

    class Plugin:
        @hookimpl
        def myhook(self, arg1):
            pass

    pm = PluginManager(hookspec.project_name)
    pm.register(Plugin())
    pm.add_hookspecs(Spec())

    with warnings.catch_warnings(record=True) as warns:
        warnings.simplefilter('always')

        # calling should trigger a warning
        pm.hook.myhook(arg1=1)

        assert len(warns) == 1
        warning = warns[-1]
        assert issubclass(warning.category, Warning)
        assert "Argument(s) ('arg2',)" in str(warning.message)


def test_result_deprecated():
    r = _Result(10, None)
    with pytest.deprecated_call():
        assert r.result == 10
mpl-2.0
rjschof/gem5
src/arch/x86/isa/insts/simd128/integer/data_conversion/convert_gpr_integer_to_floating_point.py
91
2989
# Copyright (c) 2007 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black microcode = ''' def macroop CVTSI2SS_XMM_R { mov2fp ufp1, regm, destSize=dsz, srcSize=dsz cvti2f xmml, ufp1, srcSize=dsz, destSize=4, ext=Scalar }; def macroop CVTSI2SS_XMM_M { ldfp ufp1, seg, sib, disp, dataSize=8 cvti2f xmml, ufp1, srcSize=dsz, destSize=4, ext=Scalar }; def macroop CVTSI2SS_XMM_P { rdip t7 ldfp ufp1, seg, riprel, disp, dataSize=8 cvti2f xmml, ufp1, srcSize=dsz, destSize=4, ext=Scalar }; def macroop CVTSI2SD_XMM_R { mov2fp ufp1, regm, destSize=dsz, srcSize=dsz cvti2f xmml, ufp1, srcSize=dsz, destSize=8, ext=Scalar }; def macroop CVTSI2SD_XMM_M { ldfp ufp1, seg, sib, disp, dataSize=8 cvti2f xmml, ufp1, srcSize=dsz, destSize=8, ext=Scalar }; def macroop CVTSI2SD_XMM_P { rdip t7 ldfp ufp1, seg, riprel, disp, dataSize=8 cvti2f xmml, ufp1, srcSize=dsz, destSize=8, ext=Scalar }; '''
bsd-3-clause
dungvtdev/upsbayescpm
bayespy/inference/vmp/nodes/tests/test_beta.py
3
2667
################################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################


"""
Unit tests for `beta` module.
"""

import numpy as np
from scipy import special

from bayespy.nodes import Beta
from bayespy.utils import random
from bayespy.utils.misc import TestCase


class TestBeta(TestCase):
    """
    Unit tests for Beta node
    """

    def test_init(self):
        """
        Test the creation of beta nodes.
        """

        # Some simple initializations
        p = Beta([1.5, 4.2])

        # Check that plates are correct
        p = Beta([2, 3], plates=(4,3))
        self.assertEqual(p.plates, (4,3))
        p = Beta(np.ones((4,3,2)))
        self.assertEqual(p.plates, (4,3))

        # Parent not a vector
        self.assertRaises(ValueError, Beta, 4)

        # Parent vector has wrong shape
        self.assertRaises(ValueError, Beta, [4])
        self.assertRaises(ValueError, Beta, [4,4,4])

        # Parent vector has invalid values
        self.assertRaises(ValueError, Beta, [-2,3])

        # Plates inconsistent
        self.assertRaises(ValueError, Beta, np.ones((4,2)), plates=(3,))

        # Explicit plates too small
        self.assertRaises(ValueError, Beta, np.ones((4,2)), plates=(1,))

        pass

    def test_moments(self):
        """
        Test the moments of beta nodes.
        """

        p = Beta([2, 3])
        u = p._message_to_child()
        self.assertAllClose(u[0], special.psi([2,3]) - special.psi(2+3))

        pass

    def test_random(self):
        """
        Test random sampling of beta nodes.
        """

        p = Beta([1e20, 3e20])
        x = p.random()
        self.assertAllClose(x, 0.25)

        p = Beta([[1e20, 3e20], [1e20, 1e20]])
        x = p.random()
        self.assertAllClose(x, [0.25, 0.5])

        p = Beta([1e20, 3e20], plates=(3,))
        x = p.random()
        self.assertAllClose(x, [0.25, 0.25, 0.25])

        pass
mit
marguslaak/django-xadmin
xadmin/plugins/refresh.py
28
1272
# coding=utf-8
from django.template import loader

from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView

REFRESH_VAR = '_refresh'


class RefreshPlugin(BaseAdminPlugin):

    refresh_times = []

    # Media
    def get_media(self, media):
        if self.refresh_times and self.request.GET.get(REFRESH_VAR):
            media = media + self.vendor('xadmin.plugin.refresh.js')
        return media

    # Block Views
    def block_top_toolbar(self, context, nodes):
        if self.refresh_times:
            current_refresh = self.request.GET.get(REFRESH_VAR)
            context.update({
                'has_refresh': bool(current_refresh),
                'clean_refresh_url': self.admin_view.get_query_string(remove=(REFRESH_VAR,)),
                'current_refresh': current_refresh,
                'refresh_times': [{
                    'time': r,
                    'url': self.admin_view.get_query_string({REFRESH_VAR: r}),
                    'selected': str(r) == current_refresh,
                } for r in self.refresh_times],
            })
            nodes.append(loader.render_to_string('xadmin/blocks/model_list.top_toolbar.refresh.html',
                                                 context_instance=context))


site.register_plugin(RefreshPlugin, ListAdminView)
bsd-3-clause
SiccarPoint/landlab
landlab/components/nonlinear_diffusion/examples/drive_perron.py
6
2924
from __future__ import print_function import numpy from landlab import RasterModelGrid, CLOSED_BOUNDARY from landlab import ModelParameterDictionary from landlab.components.nonlinear_diffusion.Perron_nl_diffuse import PerronNLDiffuse import pylab import time inputs = ModelParameterDictionary('./drive_perron_params.txt') nrows = inputs.read_int('nrows') ncols = inputs.read_int('ncols') dx = inputs.read_float('dx') dt = inputs.read_float('dt') time_to_run = inputs.read_float('run_time') # nt needs defining uplift = inputs.read_float('uplift_rate') init_elev = inputs.read_float('init_elev') mg = RasterModelGrid(nrows, ncols, dx) #mg.set_inactive_boundaries(False, False, False, False) # mg.set_inactive_boundaries(True,True,True,True) mg.set_looped_boundaries(True, True) #create the fields in the grid mg.add_zeros('topographic__elevation', at='node') z = mg.zeros(at='node') + init_elev mg['node'][ 'topographic__elevation'] = z + numpy.random.rand(len(z))/1000. # Now add a step to diffuse out: # mg.at_node['topographic__elevation'][mg.active_nodes[:(mg.active_nodes.shape[0]//2.)]] # += 0.05 #half block uplift # pylab.figure(1) # pylab.close() #elev = mg['node']['topographic__elevation'] #elev_r = mg.node_vector_to_raster(elev) # pylab.figure(1) #im = pylab.imshow(elev_r, cmap=pylab.cm.RdBu) # pylab.show() # Display a message print('Running ...') start_time = time.time() # instantiate the component: diffusion_component = PerronNLDiffuse(mg, './drive_perron_params.txt') # perform the loop: elapsed_time = 0. # total time in simulation while elapsed_time < time_to_run: print(elapsed_time) if elapsed_time + dt < time_to_run: diffusion_component.input_timestep(dt) mg.at_node['topographic__elevation'][mg.core_nodes] += uplift * dt # mg.at_node['topographic__elevation'][mg.active_nodes[:(mg.active_nodes.shape[0]//2.)]] += uplift*dt #half block uplift # mg.at_node['topographic__elevation'][mg.active_nodes] += (numpy.arange(len(mg.active_nodes))) #nodes are tagged with their ID # pylab.figure(1) # pylab.close() #elev = mg['node']['topographic__elevation'] #elev_r = mg.node_vector_to_raster(elev) # pylab.figure(1) #im = pylab.imshow(elev_r, cmap=pylab.cm.RdBu) # pylab.show() mg = diffusion_component.diffuse(mg, elapsed_time) elapsed_time += dt #Finalize and plot elev = mg['node']['topographic__elevation'] elev_r = mg.node_vector_to_raster(elev) # Clear previous plots pylab.figure(1) pylab.close() # Plot topography pylab.figure(1) im = pylab.imshow(elev_r, cmap=pylab.cm.RdBu) # display a colored image print(elev_r) pylab.colorbar(im) pylab.title('Topography') pylab.figure(2) # display a colored image im = pylab.plot(dx * numpy.arange(nrows), elev_r[:, int(ncols // 2)]) pylab.title('Vertical cross section') pylab.show() print('Done.') print(('Total run time = ' + str(time.time() - start_time) + ' seconds.'))
mit
wolfelee/luokr.com
www.luokr.com/app/ctrls/admin/posts.py
1
10035
#coding=utf-8 from admin import admin, AdminCtrl class Admin_PostsCtrl(AdminCtrl): @admin def get(self): pager = {} pager['qnty'] = min(int(self.input('qnty', 10)), 50) pager['page'] = max(int(self.input('page', 1)), 1) pager['list'] = 0; cur_posts = self.dbase('posts').cursor() cur_users = self.dbase('users').cursor() cur_posts.execute('select * from posts order by post_id desc limit ? offset ?', (pager['qnty'], (pager['page']-1)*pager['qnty'], )) posts = cur_posts.fetchall() psers = {} if posts: pager['list'] = len(posts) cur_users.execute('select * from users where user_id in (' + ','.join(str(i['user_id']) for i in posts) + ')') psers = self.utils().array_keyto(cur_users.fetchall(), 'user_id') cur_posts.close() cur_users.close() self.render('admin/posts.html', pager = pager, posts = posts, psers = psers) class Admin_PostHiddenCtrl(AdminCtrl): @admin def post(self): try: post_id = self.input('post_id') con = self.dbase('posts') cur = con.cursor() cur.execute('update posts set post_stat = 0 where post_id = ?', (post_id, )) con.commit() cur.close() self.flash(1) except: self.flash(0) class Admin_PostCreateCtrl(AdminCtrl): @admin def get(self): cur = self.dbase('terms').cursor() cur.execute('select * from terms order by term_id desc, term_refc desc limit 9') terms = cur.fetchall() cur.close() mode = self.input('mode', None) self.render('admin/post-create.html', mode = mode, terms = terms) @admin def post(self): try: user = self.current_user post_type = self.input('post_type', 'blog') post_title = self.input('post_title') post_descp = self.input('post_descp') post_author = self.input('post_author') post_source = self.input('post_source') post_summary = self.input('post_summary') post_content = self.input('post_content') post_rank = self.input('post_rank') post_stat = self.input('post_stat', 0) post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S'))) post_ctms = self.stime() post_utms = post_ctms term_list = [] for term_name in self.input('term_list').split(' '): if term_name == '': continue term_list.append(term_name) if len(term_list) > 10: self.flash(0, {'msg': '标签数量限制不能超过 10 个'}) return con_posts = self.dbase('posts') cur_posts = con_posts.cursor() con_terms = self.dbase('terms') cur_terms = con_terms.cursor() term_imap = {} term_ctms = self.stime() for term_name in term_list: cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,)) term_id = cur_terms.fetchone() if term_id: term_id = term_id['term_id'] else: cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, )) if cur_terms.lastrowid: term_id = cur_terms.lastrowid if term_id: term_imap[term_id] = term_name cur_posts.execute('insert into posts (user_id, post_type, post_title, post_descp, post_author, post_source, post_summary, post_content,post_stat, post_rank, post_ptms, post_ctms, post_utms) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \ (user['user_id'], post_type, post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_ctms, post_utms ,)) post_id = cur_posts.lastrowid if term_imap: for term_id in term_imap: cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')') if term_imap: cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')') con_posts.commit() cur_posts.close() con_terms.commit() 
con_terms.close() self.model('alogs').add(self.dbase('alogs'), '新增文章:' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name']) self.flash(1, {'url': '/admin/post?post_id=' + str(post_id)}) except: self.flash(0) class Admin_PostCtrl(AdminCtrl): @admin def get(self): post_id = self.input('post_id') con_posts = self.dbase('posts') cur_posts = con_posts.cursor() cur_posts.execute('select * from posts where post_id = ?', (post_id, )) post = cur_posts.fetchone() if not post: cur_posts.close() return self.send_error(404) mode = self.input('mode', None) con_terms = self.dbase('terms') cur_terms = con_terms.cursor() cur_terms.execute('select * from terms order by term_id desc, term_refc desc limit 9') terms = cur_terms.fetchall() ptids = {} ptags = {} cur_posts.execute('select post_id,term_id from post_terms where post_id = ?', (post_id, )) ptids = cur_posts.fetchall() if ptids: cur_terms.execute('select * from terms where term_id in (' + ','.join(str(i['term_id']) for i in ptids) + ')') ptags = cur_terms.fetchall() if ptags: ptids = self.utils().array_group(ptids, 'post_id') ptags = self.utils().array_keyto(ptags, 'term_id') cur_posts.close() cur_terms.close() self.render('admin/post.html', mode = mode, post = post, terms = terms, ptids = ptids, ptags = ptags) @admin def post(self): try: user = self.current_user post_id = self.input('post_id') post_title = self.input('post_title') post_descp = self.input('post_descp') post_author = self.input('post_author') post_source = self.input('post_source') post_summary = self.input('post_summary') post_content = self.input('post_content') post_rank = self.input('post_rank') post_stat = self.input('post_stat', 0) post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S'))) post_utms = self.stime() term_list = [] for term_name in self.input('term_list').split(' '): if term_name == '': continue term_list.append(term_name) if len(term_list) > 10: self.flash(0, {'msg': '标签数量限制不能超过 10 个'}) return con_posts = self.dbase('posts') cur_posts = con_posts.cursor() con_terms = self.dbase('terms') cur_terms = con_terms.cursor() cur_posts.execute('select * from posts where post_id = ?', (post_id, )) post = cur_posts.fetchone() if not post: cur_posts.close() cur_terms.close() self.flash(0, '没有指定文章ID') return term_imap = {} term_ctms = self.stime() for term_name in term_list: cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,)) term_id = cur_terms.fetchone() if term_id: term_id = term_id['term_id'] else: cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, )) if cur_terms.lastrowid: term_id = cur_terms.lastrowid if term_id: term_imap[term_id] = term_name cur_posts.execute('select term_id from post_terms where post_id = ?', (post_id, )) post_tids = cur_posts.fetchall() cur_posts.execute('update posts set user_id=?,post_title=?,post_descp=?,post_author=?,post_source=?,post_summary=?,post_content=?,post_stat=?,post_rank=?,post_ptms=?,post_utms=? 
where post_id=?', \ (user['user_id'], post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_utms, post_id,)) cur_posts.execute('delete from post_terms where post_id = ?', (post_id,)) if term_imap: for term_id in term_imap: cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')') if post_tids: cur_terms.execute('update terms set term_refc = term_refc - 1 where term_id in (' + ','.join([str(i['term_id']) for i in post_tids]) + ')') if term_imap: cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')') con_posts.commit() cur_posts.close() con_terms.commit() cur_terms.close() self.model('alogs').add(self.dbase('alogs'), '更新文章:' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name']) self.flash(1) except: self.flash(0)
bsd-3-clause
whn09/tensorflow
tensorflow/contrib/graph_editor/tests/util_test.py
154
6110
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.contrib.graph_editor.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib import graph_editor as ge from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test class UtilTest(test.TestCase): def test_list_view(self): """Test for ge.util.ListView.""" l = [0, 1, 2] lv = ge.util.ListView(l) # Should not be the same id. self.assertIsNot(l, lv) # Should behave the same way than the original list. self.assertTrue(len(lv) == 3 and lv[0] == 0 and lv[1] == 1 and lv[2] == 2) # Should be read only. with self.assertRaises(TypeError): lv[0] = 0 def test_is_iterable(self): """Test for ge.util.is_iterable.""" self.assertTrue(ge.util.is_iterable([0, 1, 2])) self.assertFalse(ge.util.is_iterable(3)) def test_unique_graph(self): """Test for ge.util.check_graphs and ge.util.get_unique_graph.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1) b0 = constant_op.constant(2) g1 = ops.Graph() with g1.as_default(): a1 = constant_op.constant(1) b1 = constant_op.constant(2) # Same graph, should be fine. self.assertIsNone(ge.util.check_graphs(a0, b0)) # Two different graphs, should assert. with self.assertRaises(ValueError): ge.util.check_graphs(a0, b0, a1, b1) # a0 and b0 belongs to the same graph, should be fine. self.assertEqual(ge.util.get_unique_graph([a0, b0]), g0) # Different graph, should raise an error. with self.assertRaises(ValueError): ge.util.get_unique_graph([a0, b0, a1, b1]) def test_make_list_of_op(self): """Test for ge.util.make_list_of_op.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1) b0 = constant_op.constant(2) # Should extract the ops from the graph. self.assertEqual(len(ge.util.make_list_of_op(g0)), 2) # Should extract the ops from the tuple. self.assertEqual(len(ge.util.make_list_of_op((a0.op, b0.op))), 2) def test_make_list_of_t(self): """Test for ge.util.make_list_of_t.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1) b0 = constant_op.constant(2) c0 = math_ops.add(a0, b0) # pylint: disable=unused-variable # Should extract the tensors from tre graph. self.assertEqual(len(ge.util.make_list_of_t(g0)), 3) # Should extract the tensors from the tuple self.assertEqual(len(ge.util.make_list_of_t((a0, b0))), 2) # Should extract the tensors and ignore the ops. 
self.assertEqual( len(ge.util.make_list_of_t( (a0, a0.op, b0), ignore_ops=True)), 2) def test_get_generating_consuming(self): """Test for ge.util.get_generating_ops and ge.util.get_generating_ops.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1) b0 = constant_op.constant(2) c0 = math_ops.add(a0, b0) self.assertEqual(len(ge.util.get_generating_ops([a0, b0])), 2) self.assertEqual(len(ge.util.get_consuming_ops([a0, b0])), 1) self.assertEqual(len(ge.util.get_generating_ops([c0])), 1) self.assertEqual(ge.util.get_consuming_ops([c0]), []) def test_control_outputs(self): """Test for the ge.util.ControlOutputs class.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1) b0 = constant_op.constant(2) x0 = constant_op.constant(3) with ops.control_dependencies([x0.op]): c0 = math_ops.add(a0, b0) # pylint: disable=unused-variable control_outputs = ge.util.ControlOutputs(g0).get_all() self.assertEqual(len(control_outputs), 1) self.assertEqual(len(control_outputs[x0.op]), 1) self.assertIs(list(control_outputs[x0.op])[0], c0.op) def test_scope(self): """Test simple path scope functionalities.""" self.assertEqual(ge.util.scope_finalize("foo/bar"), "foo/bar/") self.assertEqual(ge.util.scope_dirname("foo/bar/op"), "foo/bar/") self.assertEqual(ge.util.scope_basename("foo/bar/op"), "op") def test_placeholder(self): """Test placeholder functionalities.""" g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1, name="foo") # Test placeholder name. self.assertEqual(ge.util.placeholder_name(a0), "geph__foo_0") self.assertEqual(ge.util.placeholder_name(None), "geph") self.assertEqual( ge.util.placeholder_name( a0, scope="foo/"), "foo/geph__foo_0") self.assertEqual( ge.util.placeholder_name( a0, scope="foo"), "foo/geph__foo_0") self.assertEqual(ge.util.placeholder_name(None, scope="foo/"), "foo/geph") self.assertEqual(ge.util.placeholder_name(None, scope="foo"), "foo/geph") # Test placeholder creation. g0 = ops.Graph() with g0.as_default(): a0 = constant_op.constant(1, dtype=dtypes.float32, name="a0") c0 = math_ops.add( ge.util.make_placeholder_from_tensor(a0), ge.util.make_placeholder_from_dtype_and_shape(dtype=dtypes.float32)) self.assertEqual(c0.op.inputs[0].op.name, "geph__a0_0") self.assertEqual(c0.op.inputs[1].op.name, "geph") if __name__ == "__main__": test.main()
apache-2.0
Telestream/telestream-cloud-python-sdk
telestream_cloud_notifications_sdk/test/test_params.py
1
1740
# coding: utf-8

"""
    Notifications API

    Notifications  # noqa: E501

    The version of the OpenAPI document: 2.1.0
    Contact: [email protected]
    Generated by: https://openapi-generator.tech
"""

from __future__ import absolute_import

import unittest
import datetime

import telestream_cloud_notifications
from telestream_cloud_notifications.models.params import Params  # noqa: E501
from telestream_cloud_notifications.rest import ApiException


class TestParams(unittest.TestCase):
    """Params unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Test Params
            include_option is a boolean, when False only required
            params are included, when True both required and
            optional params are included """
        # model = telestream_cloud_notifications.models.params.Params()  # noqa: E501
        if include_optional:
            return Params(
                addresses=['0'],
                url='0',
                method='GET',
                retries=56,
                content_type='application/json',
                topic_arn='0',
                role_arn='0',
                topic_endpoint='0',
                access_key='0',
                project_id='0',
                topic_name='0'
            )
        else:
            return Params(
            )

    def testParams(self):
        """Test Params"""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)


if __name__ == '__main__':
    unittest.main()
mit
ppqm/fitting
fitter/fit.py
1
9239
import sklearn import sklearn.model_selection import time import itertools import functools import multiprocessing as mp import os import subprocess import time import copy import json import numpy as np import pandas as pd from numpy.linalg import norm from scipy.optimize import minimize import rmsd import joblib import mndo cachedir = '.pycache' memory = joblib.Memory(cachedir, verbose=0) def get_penalty(calc_properties, refs_properties, property_weights, keys=None): penalty = 0.0 n = 0 return penalty @memory.cache def load_data(): reference = "../dataset-qm9/reference.csv" reference = pd.read_csv(reference) filenames = reference["name"] # energies = reference["binding energy"] atoms_list = [] coord_list = [] charges = [] titles = [] for filename in filenames: titles.append(filename) charges.append(0) filename = "../dataset-qm9/xyz/" + filename + ".xyz" atoms, coord = rmsd.get_coordinates_xyz(filename) atoms_list.append(atoms) coord_list.append(coord) offset = 10+100 to_offset = 110+100 atoms_list = atoms_list[offset:to_offset] coord_list = coord_list[offset:to_offset] charges = charges[offset:to_offset] titles = titles[offset:to_offset] reference = reference[offset:to_offset] return atoms_list, coord_list, charges, titles, reference def minimize_parameters(mols_atoms, mols_coords, reference_properties, start_parameters, n_procs=1, method="PM3", ignore_keys=['DD2','DD3','PO1','PO2','PO3','PO9','HYF','CORE','EISOL','FN1','FN2','FN3','GSCAL','BETAS','ZS']): """ """ n_mols = len(mols_atoms) # Select header header = """{:} 1SCF MULLIK PRECISE charge={{:}} iparok=1 jprint=5 nextmol=-1 TITLE {{:}}""" header = header.format(method) filename = "_tmp_optimizer" inputtxt = mndo.get_inputs(mols_atoms, mols_coords, np.zeros(n_mols), range(n_mols), header=header) with open(filename, 'w') as f: f.write(inputtxt) # Select atom parameters to optimize atoms = [np.unique(atom) for atom in mols_atoms] atoms = list(itertools.chain(*atoms)) atoms = np.unique(atoms) parameters_values = [] parameters_keys = [] parameters = {} # Select parameters for atom in atoms: atom_params = start_parameters[atom] current = {} for key in atom_params: if key in ignore_keys: continue value = atom_params[key] current[key] = value parameters_values.append(value) parameters_keys.append([atom, key]) parameters[atom] = current # Define penalty func def penalty(params, debug=True): for param, key in zip(params, parameters_keys): parameters[key[0]][key[1]] = param mndo.set_params(parameters) properties_list = mndo.calculate(filename) calc_energies = np.array([properties["energy"] for properties in properties_list]) diff = reference_properties - calc_energies idxs = np.argwhere(np.isnan(diff)) diff[idxs] = 700.0 error = np.abs(diff) error = error.mean() if debug: print("penalty: {:10.2f}".format(error)) return error def penalty_properties(properties_list): calc_energies = np.array([properties["energy"] for properties in properties_list]) diff = reference_properties - calc_energies idxs = np.argwhere(np.isnan(diff)) diff[idxs] = 700.0 error = np.abs(diff) error = error.mean() return error def jacobian(params, dh=10**-5, debug=False): # TODO Parallelt grad = [] for i, p in enumerate(params): dparams = copy.deepcopy(params) dparams[i] += dh forward = penalty(dparams, debug=False) dparams[i] -= (2.0 * dh) backward = penalty(dparams, debug=False) de = forward - backward grad.append(de/(2.0 * dh)) grad = np.array(grad) if debug: nm = np.linalg.norm(grad) print("penalty grad: {:10.2f}".format(nm)) return grad def jacobian_parallel(params, 
dh=10**-5, procs=1): """ """ for param, key in zip(params, parameters_keys): parameters[key[0]][key[1]] = param params_grad = mndo.numerical_jacobian(inputtxt, parameters, n_procs=procs, dh=dh) grad = [] for atom, key in parameters_keys: forward_mols, backward_mols = params_grad[atom][key] penalty_forward = penalty_properties(forward_mols) penalty_backward = penalty_properties(backward_mols) de = penalty_forward - penalty_backward grad.append(de/(2.0 * dh)) grad = np.array(grad) return grad start_error = penalty(parameters_values) # check grad dh = 10**-5 t = time.time() grad = jacobian(parameters_values, dh=dh) nm = np.linalg.norm(grad) secs = time.time() - t print("penalty grad: {:10.2f} time: {:10.2f}".format(nm, secs)) t = time.time() grad = jacobian_parallel(parameters_values, procs=2, dh=dh) nm = np.linalg.norm(grad) secs = time.time() - t print("penalty grad: {:10.2f} time: {:10.2f}".format(nm, secs)) quit() res = minimize(penalty, parameters_values, method="L-BFGS-B", jac=jacobian, options={"maxiter": 1000, "disp": True}) parameters_values = res.x error = penalty(parameters_values) for param, key in zip(parameters_values, parameters_keys): parameters[key[0]][key[1]] = param end_parameters = parameters return end_parameters, error def learning_curve( mols_atoms, mols_coords, reference_properties, start_parameters): fold_five = sklearn.model_selection.KFold(n_splits=5, random_state=42, shuffle=True) n_items = len(mols_atoms) X = list(range(n_items)) score = [] for train_idxs, test_idxs in fold_five.split(X): train_atoms = [mols_atoms[i] for i in train_idxs] train_coords = [mols_coords[i] for i in train_idxs] train_properties = reference_properties[train_idxs] test_atoms = [mols_atoms[i] for i in test_idxs] test_coords = [mols_coords[i] for i in test_idxs] test_properties = reference_properties[test_idxs] train_parameters, train_error = minimize_parameters(train_atoms, train_coords, train_properties, start_parameters) print(train_parameters) quit() return def main(): import argparse import sys description = """""" parser = argparse.ArgumentParser( usage='%(prog)s [options]', description=description, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('-f', '--format', action='store', help='', metavar='fmt') parser.add_argument('-s', '--settings', action='store', help='', metavar='json') parser.add_argument('-p', '--parameters', action='store', help='', metavar='json') parser.add_argument('-o', '--results_parameters', action='store', help='', metavar='json') parser.add_argument('--methods', action='store', help='', metavar='str') args = parser.parse_args() mols_atoms, mols_coords, mols_charges, titles, reference = load_data() ref_energies = reference.iloc[:,1].tolist() ref_energies = np.array(ref_energies) with open(args.parameters, 'r') as f: start_params = f.read() start_params = json.loads(start_params) # end_params = minimize_parameters(mols_atoms, mols_coords, ref_energies, start_params) end_params = learning_curve(mols_atoms, mols_coords, ref_energies, start_params) print(end_params) quit() # TODO select reference # TODO prepare input file filename = "_tmp_optimizer" txt = mndo.get_inputs(atoms_list, coord_list, charges, titles) f = open(filename, 'w') f.write(txt) f.close() # TODO prepare parameters parameters = np.array([ -99., -77., 2., -32., 3., ]) parameter_keys = [ ["O", "USS"], ["O", "UPP"], ["O", "ZP"], ["O", "BETAP"], ["O", "ALP"], ] parameter_dict = {} parameter_dict["O"] = {} # TODO calculate penalty # properties_list = mndo.calculate(filename) 
def penalty(params): for param, key in zip(params, parameter_keys): parameter_dict[key[0]][key[1]] = param mndo.set_params(parameter_dict) properties_list = mndo.calculate(filename) calc_energies = np.array([properties["energy"] for properties in properties_list]) diff = ref_energies - calc_energies idxs = np.argwhere(np.isnan(diff)) diff[idxs] = 700.0 error = diff.mean() return error print(penalty(parameters)) status = minimize(penalty, parameters, method="L-BFGS-B", options={"maxiter": 1000, "disp": True}) print() print(status) # TODO optimize return if __name__ == "__main__": main()
cc0-1.0
TheWardoctor/Wardoctors-repo
script.module.nanscrapers/lib/nanscrapers/scraperplugins/streamthis.py
6
3297
import re import requests import difflib import xbmc from ..scraper import Scraper from ..common import clean_title User_Agent = 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:56.0) Gecko/20100101 Firefox/56.0' headers = {'User-Agent':User_Agent} class streamthis(Scraper): domains = ['streamthis.tv'] name = "streamthis" sources = [] def __init__(self): self.base_link = 'http://streamthis.tv' self.search_link = '/index.php?menu=search&query=' def scrape_episode(self, title, show_year, year, season, episode, imdb, tvdb, debrid = False): try: start_url = self.base_link+self.search_link+title.replace(' ','+') html = requests.get(start_url,headers=headers).content match = re.compile('<div class="col xs12 s6 m3 l2 animated bounceInUp">.+?<a href="(.+?)".+?<p class="smallttl"> (.+?)</p>.+?<i class="fa fa-calendar-o" aria-hidden="true"></i> (.+?)</div>',re.DOTALL).findall(html) for url,name,year in match: if clean_title(name) in clean_title(title): if year == show_year: html2 = requests.get(url,headers=headers).content ep_match = re.compile('<a class="collection-item black-text".+?href="(.+?)".+?<b>(.+?)</b>').findall(html2) for url2,episodes in ep_match: if len(season)==1: season ='0'+season if len(episode)==1: episode ='0'+episode ep_check = 'S'+season+'E'+episode if ep_check == episodes: self.get_sources(url2) return self.sources except: pass return [] def scrape_movie(self, title, year, imdb, debrid = False): try: start_url = self.base_link+self.search_link+title.replace(' ','+') html = requests.get(start_url,headers=headers).content match = re.compile('<div class="col xs12 s6 m3 l2 animated bounceInUp">.+?<a href="(.+?)".+?<p class="smallttl"> (.+?)</p>.+?<i class="fa fa-calendar-o" aria-hidden="true"></i> (.+?)</div>',re.DOTALL).findall(html) for url,name,movie_year in match: if clean_title(name) in clean_title(title): if year == movie_year: self.get_sources(url) return self.sources except: pass return[] def get_sources(self,url2): try: print url2 html = requests.get(url2,headers=headers).content match = re.findall('<a class="collection-item black-text" href="(.+?)" target="_blank"><img src=".+?"> (.+?)</a>',html) for link,name in match: if name.lower() == 'full hd 1080p': pass else: self.sources.append({'source': name, 'quality': 'SD', 'scraper': self.name, 'url': link,'direct': False}) except: pass #streamthis().scrape_episode('the blacklist','2013','2017','2','4','','') #streamthis().scrape_movie('moana','2016','')
apache-2.0
konstruktoid/ansible-upstream
lib/ansible/plugins/action/ce_template.py
95
3891
#
# Copyright 2015 Peter Sprygada <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import time
import glob

from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.module_utils._text import to_text
from ansible.plugins.action.ce import ActionModule as _ActionModule


class ActionModule(_ActionModule):

    def run(self, tmp=None, task_vars=None):
        try:
            self._handle_template()
        except (ValueError, AttributeError) as exc:
            return dict(failed=True, msg=exc.message)

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        if self._task.args.get('backup') and result.get('__backup__'):
            # User requested backup and no error occurred in module.
            # NOTE: If there is a parameter error, __backup__ key may not be in results.
            self._write_backup(task_vars['inventory_hostname'], result['__backup__'])

        if '__backup__' in result:
            del result['__backup__']

        return result

    def _get_working_path(self):
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        open(filename, 'w').write(contents)

    def _handle_template(self):
        src = self._task.args.get('src')
        if not src:
            raise ValueError('missing required arguments: src')

        working_path = self._get_working_path()

        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)

        if not os.path.exists(source):
            return

        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            return dict(failed=True, msg='unable to load src file')

        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))

        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
gpl-3.0
Fat-Zer/FreeCAD_sf_master
src/Mod/Show/mTempoVis.py
22
24926
#/*************************************************************************** # * Copyright (c) Victor Titov (DeepSOIC) * # * ([email protected]) 2016 * # * * # * This file is part of the FreeCAD CAx development system. * # * * # * This library is free software; you can redistribute it and/or * # * modify it under the terms of the GNU Library General Public * # * License as published by the Free Software Foundation; either * # * version 2 of the License, or (at your option) any later version. * # * * # * This library is distributed in the hope that it will be useful, * # * but WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # * GNU Library General Public License for more details. * # * * # * You should have received a copy of the GNU Library General Public * # * License along with this library; see the file COPYING.LIB. If not, * # * write to the Free Software Foundation, Inc., 59 Temple Place, * # * Suite 330, Boston, MA 02111-1307, USA * # * * # ***************************************************************************/ # module is named mTempoVis, because Show.TimpoVis exposes the class as its member, and hides the module TempoVis.py. from . import Containers from . import TVStack import FreeCAD as App if App.GuiUp: import FreeCADGui as Gui Wrn = lambda msg: App.Console.PrintWarning(msg + "\n") Err = lambda msg: App.Console.PrintError(msg + "\n") Log = lambda msg: App.Console.PrintLog(msg + "\n") from copy import copy S_EMPTY = 0 # TV is initialized, but no changes were done through it S_ACTIVE = 1 # TV has something to be undone S_RESTORED = 2 # TV has been restored S_INTERNAL = 3 # TV instance is being used by another TV instance as a redo data storage def _printTraceback(err): import sys if err is sys.exc_info()[1]: import traceback tb = traceback.format_exc() Log(tb) class MAINSTACK(object): '''it's just a default value definition for TV constructor''' pass class JUST_SAVE(object): '''it's just a default value meaning "save current scene value but don't modify anything"''' pass class TempoVis(object): '''TempoVis - helper object to save visibilities of objects before doing some GUI editing, hiding or showing relevant stuff during edit, and then restoring all visibilities after editing. Constructors: TempoVis(document, stack = MAINSTACK, **kwargs): creates a new TempoVis. document: required. Objects not belonging to the document can't be modified via TempoVis. stack: optional. Which stack to insert this new TV into. Can be: a TVStack instance (then, the new TV is added to the top of the stack), MAINSTACK special value (a global stack for the document will be used), or None (then, the TV is not in any stack, and can be manually instertd into one if desired). Any additional keyword args are assigned as attributes. You can use it to immediately set a tag, for example.''' document = None stack = None # reference to stack this TV is in data = None # dict. 
key = ("class_id","key"), value = instance of SceneDetail data_requested = None #same as data, but stores (wanted) values passed to modify() state = S_EMPTY tag = '' #stores any user-defined string for identification purposes def _init_attrs(self): '''initialize member variables to empty values (needed because we can't use mutable initial values when initializing member variables in class definition)''' self.data = {} self.data_requested = {} #<core interface> def __init__(self, document, stack = MAINSTACK, **kwargs): self._init_attrs() self.document = document if stack is MAINSTACK: stack = TVStack.mainStack(document) if stack is None: pass else: stack.insert(self) for key,val in kwargs.items(): setattr(self, key, val) def __del__(self): if self.state == S_ACTIVE: self.restore(ultimate= True) def has(self, detail): '''has(self, detail): returns True if this TV has this detail value saved. example: tv.has(VProperty(obj, "Visibility"))''' return detail.full_key in self.data def stored_val(self, detail): '''stored_val(self, detail): returns value of detail remembered by this TV. If not, raises KeyError.''' return self.data[detail.full_key].data def save(self, detail, mild_restore = False): '''save(detail, mild_restore = False):saves the scene detail to be restored. The detail is saved only once; repeated calls are ignored. mild_restore: internal, do not use.''' self._change() if not detail.full_key in self.data: #not saved yet tv1, curr = self._value_after(detail, query_scene= True) self.data[detail.full_key] = copy(curr) self.data[detail.full_key].mild_restore = mild_restore else: #saved already. Change restore policy, if necessary. stored_dt = self.data[detail.full_key] if not mild_restore: stored_dt.mild_restore = False def modify(self, detail, mild_restore = None): '''modify(detail, mild_restore = True): modifies scene detail through this TV. The value is provided as an instance of SceneDetail implementation. The procedure takes care to account for the stack - that is, if in a TV applied later than this one this detail was changed too, the value saved therein is altered, rather than applied to the scene. mild_restore: if True, when restoring later, checks if the value was changed by user after last call to modify(), and doesn't restore if it was changed. Example: tv.modify(VProperty(obj, "Visibility", True))''' self._change() if mild_restore is not None: detail.mild_restore = mild_restore # save current self.save(detail, detail.mild_restore) # apply tv1, curr = self._value_after(detail) if tv1 is not None: tv1.data[detail.full_key].data = detail.data else: detail.apply_data(detail.data) # and record. if detail.mild_restore: self.data_requested[detail.full_key] = copy(detail) def restoreDetail(self, detail, ultimate = False): '''restoreDetail(detail, ultimate = False): restores a specific scene detail. ultimate: if true, the saved value is cleaned out. If the detail is not found, nothing is done. ''' if not self.has(detail): return self._restore_detail(detail) if ultimate: self.forgetDetail(detail) def forgetDetail(self, detail): '''forgetDetail(detail): ditches a saved detail value, making the change done through this TV permanent.''' self.data.pop(detail.full_key, None) self.data_requested.pop(detail.full_key, None) def forget(self): '''forget(self): clears this TV, making all changes done through it permanent. 
Also, withdraws the TV from the stack.''' self.state = S_EMPTY self.data = {} if self.is_in_stack: self.stack.withdraw(self) def restore(self, ultimate = True): '''restore(ultimate = True): undoes all changes done through this tempovis / restores saved scene details. ultimate: if true, the saved values are cleaned out, and the TV is withdrawn from the stack. If false, the TV will still remember stuff, and restore can be called again. ''' if self.state == S_RESTORED: return if self.state != S_INTERNAL and ultimate: self.state = S_RESTORED for key, detail in self.data.items(): try: self._restoreDetail(detail) except Exception as err: Err("TempoVis.restore: failed to restore detail {key}: {err}".format(key= key, err= str(err))) _printTraceback(err) if ultimate: self.data = {} if self.is_in_stack: self.stack.withdraw(self) #</core interface> #<stack interface> def _inserted(self, stack, index): '''calles when this tv is inserted into a stack''' self.stack = stack def _withdrawn(self, stack, index): '''calles when this tv is withdrawn from a stack''' self.stack = None @property def is_in_stack(self): return self.stack is not None #</stack interface> #<convenience functions> def modifyVPProperty(self, doc_obj_or_list, prop_names, new_value = JUST_SAVE, mild_restore = None): '''modifyVPProperty(doc_obj_or_list, prop_names, new_value = JUST_SAVE, mild_restore = None): modifies prop_name property of ViewProvider of doc_obj_or_list, and remembers original value of the property. Original values will be restored upon TempoVis deletion, or call to restore(). mild_restore: test if user changed the value manually when restoring the TV.''' if self.state == S_RESTORED: Wrn("Attempting to use a TV that has been restored. There must be a problem with code.") return if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] if not isinstance(prop_names,(list,tuple)): prop_names = [prop_names] for doc_obj in doc_obj_or_list: for prop_name in prop_names: if not hasattr(doc_obj.ViewObject, prop_name): Wrn("TempoVis: object {obj} has no attribute {attr}. Skipped." .format(obj= doc_obj.Name, attr= prop_name)) continue # Because the introduction of external objects, we shall now # accept objects from all opened documents. # # if doc_obj.Document is not self.document: #ignore objects from other documents # raise ValueError("Document object to be modified does not belong to document TempoVis was made for.") from .SceneDetails.VProperty import VProperty if new_value is JUST_SAVE: if mild_restore: Wrn("TempoVis: can't just save a value for mild restore. 
Saving for hard restore.") self.save(VProperty(doc_obj, prop_name, new_value)) else: self.modify(VProperty(doc_obj, prop_name, new_value), mild_restore) def restoreVPProperty(self, doc_obj_or_list, prop_names): '''restoreVPProperty(doc_obj_or_list, prop_name, new_value): restores specific property changes.''' from .SceneDetails.VProperty import VProperty if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] if not isinstance(prop_names,(tuple,list)): prop_names = [prop_names] for doc_obj in doc_obj_or_list: for prop_name in prop_names: try: self.restoreDetail(VProperty(doc_obj, prop_name)) except Exception as err: Err("TempoVis.restore: failed to restore detail {key}: {err}".format(key= key, err= str(err))) _printTraceback(err) def saveBodyVisibleFeature(self, doc_obj_or_list): """saveBodyVisibleFeature(self, doc_obj_or_list): saves Visibility of currently visible feature, for every body of PartDesign features in the provided list.""" if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] objs = [] bodies = set() for obj in doc_obj_or_list: body = getattr(obj,'_Body',None) if not body or body in bodies: continue bodies.add(body) feature = getattr(body,'VisibleFeature',None) if feature: objs.append(feature) self.modifyVPProperty(objs, 'Visibility', JUST_SAVE) return objs def show(self, doc_obj_or_list, links_too = True, mild_restore = None): '''show(doc_obj_or_list, links_too = True): shows objects (sets their Visibility to True). doc_obj_or_list can be a document object, or a list of document objects. If links_too is True, all Links of the objects are also hidden, by setting LinkVisibility attribute of each object.''' doc_obj_or_list = self._3D_objects(doc_obj_or_list) self.saveBodyVisibleFeature(doc_obj_or_list) #fix implicit hiding of other features by PartDesign not being recorded to TV self.modifyVPProperty(doc_obj_or_list, 'Visibility', True, mild_restore) if links_too: self.modifyVPProperty(doc_obj_or_list, 'LinkVisibility', True, mild_restore) def hide(self, doc_obj_or_list, links_too = True, mild_restore = None): '''hide(doc_obj_or_list): hides objects (sets their Visibility to False). doc_obj_or_list can be a document object, or a list of document objects''' doc_obj_or_list = self._3D_objects(doc_obj_or_list) # no need to saveBodyVisibleFeature here, as no implicit showing will happen self.modifyVPProperty(doc_obj_or_list, 'Visibility', False, mild_restore) if links_too: self.modifyVPProperty(doc_obj_or_list, 'LinkVisibility', False, mild_restore) def get_all_dependent(self, doc_obj, subname = None): '''get_all_dependent(doc_obj, subname = None): gets all objects that depend on doc_obj. Containers and Links (if subname) required for visibility of the object are excluded from the list.''' from . import Containers from .Containers import isAContainer from .DepGraphTools import getAllDependencies, getAllDependent if subname: # a link-path was provided. doc_obj has nothing to do with the object we want # to collect dependencies from. So, replace it with the one pointed by link-path. cnt_chain = doc_obj.getSubObjectList(subname) doc_obj = cnt_chain[-1].getLinkedObject() # cnt_chain can either end with the object (e.g. if a sketch is in a part, and # a link is to a part), or it may be a Link object (if we have a straight or # even nested Link to the sketch). 
# # I don't know why do we need that isAContainer check here, but I'm leaving it, # realthunder must be knowing his business --DeepSOIC cnt_chain = [ o for o in cnt_chain if o==cnt_chain[-1] or isAContainer(o, links_too= True) ] else: cnt_chain = Containers.ContainerChain(doc_obj) return [o for o in getAllDependent(doc_obj) if not o in cnt_chain] def hide_all_dependent(self, doc_obj): '''hide_all_dependent(doc_obj): hides all objects that depend on doc_obj. Groups, Parts and Bodies are not hidden by this.''' self.hide(self._3D_objects(self.get_all_dependent(doc_obj))) def show_all_dependent(self, doc_obj): '''show_all_dependent(doc_obj): shows all objects that depend on doc_obj. This method is probably useless.''' from .DepGraphTools import getAllDependencies, getAllDependent self.show(self._3D_objects(getAllDependent(doc_obj))) def restore_all_dependent(self, doc_obj): '''show_all_dependent(doc_obj): restores original visibilities of all dependent objects.''' from .DepGraphTools import getAllDependencies, getAllDependent self.restoreVPProperty( getAllDependent(doc_obj), ('Visibility', 'LinkVisibility') ) def hide_all_dependencies(self, doc_obj): '''hide_all_dependencies(doc_obj): hides all objects that doc_obj depends on (directly and indirectly).''' from .DepGraphTools import getAllDependencies, getAllDependent self.hide(self._3D_objects(getAllDependencies(doc_obj))) def show_all_dependencies(self, doc_obj): '''show_all_dependencies(doc_obj): shows all objects that doc_obj depends on (directly and indirectly). This method is probably useless.''' from .DepGraphTools import getAllDependencies, getAllDependent self.show(self._3D_objects(getAllDependencies(doc_obj))) def saveCamera(self, vw = None): self._change() from .SceneDetails.Camera import Camera self.save(Camera(self.document)) def restoreCamera(self, ultimate = False): from .SceneDetails.Camera import Camera dt = Camera(self.document) self.restoreDetail(dt, ultimate) def setUnpickable(self, doc_obj_or_list, actual_pick_style = 2): #2 is coin.SoPickStyle.UNPICKABLE '''setUnpickable(doc_obj_or_list, actual_pick_style = 2): sets object unpickable (transparent to clicks). doc_obj_or_list: object or list of objects to alter (App) actual_pick_style: optional parameter, specifying the actual pick style: 0 = regular, 1 = bounding box, 2 (default) = unpickable. Implementation detail: uses SoPickStyle node. If viewprovider already has a node of this type as direct child, one is used. Otherwise, new one is created and inserted as the very first node, and remains there even after restore()/deleting tempovis. ''' from .SceneDetails.Pickability import Pickability from .ShowUtils import is3DObject if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] for doc_obj in doc_obj_or_list: if not is3DObject(doc_obj): continue dt = Pickability(doc_obj, actual_pick_style) self.modify(dt) def clipPlane(self, doc_obj_or_list, enable, placement, offset = 0.02): '''clipPlane(doc_obj_or_list, enable, placement, offset): slices off the object with a clipping plane. doc_obj_or_list: object or list of objects to alter (App) enable: True if you want clipping, False if you want to remove clipping: placement: XY plane of local coordinates of the placement is the clipping plane. The placement must be in document's global coordinate system. offset: shifts the plane. Positive offset reveals more of the object. Implementation detail: uses SoClipPlane node. If viewprovider already has a node of this type as direct child, one is used. 
Otherwise, new one is created and inserted as the very first node. The node is left, but disabled when tempovis is restoring.''' from .SceneDetails.ObjectClipPlane import ObjectClipPlane from .ShowUtils import is3DObject if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] for doc_obj in doc_obj_or_list: if not is3DObject(doc_obj): continue dt = ObjectClipPlane(doc_obj, enable, placement, offset) self.modify(dt) @staticmethod def allVisibleObjects(aroundObject): '''allVisibleObjects(aroundObject): returns list of objects that have to be toggled invisible for only aroundObject to remain. If a whole container can be made invisible, it is returned, instead of its child objects.''' from .ShowUtils import is3DObject from . import Containers chain = Containers.VisGroupChain(aroundObject) result = [] for i in range(len(chain)): cnt = chain[i] cnt_next = chain[i+1] if i+1 < len(chain) else aroundObject container = Containers.Container(cnt) for obj in container.getVisGroupChildren(): if not is3DObject(obj): continue if obj is not cnt_next: if container.isChildVisible(obj): result.append(obj) return result def sketchClipPlane(self, sketch, enable = None): '''sketchClipPlane(sketch, enable = None): Clips all objects by plane of sketch. If enable argument is omitted, calling the routine repeatedly will toggle clipping plane.''' from .SceneDetails.ClipPlane import ClipPlane editDoc = Gui.editDocument() if editDoc is None: doc = sketch.Document pla = sketch.getGlobalPlacement() else: doc = editDoc.Document pla = App.Placement(editDoc.EditingTransform) toggle = {False: 0, True: 1, None: -1}[enable] self.modify(ClipPlane(doc, toggle, pla, 0.02)) def activateWorkbench(self, wb_name): from .SceneDetails.Workbench import Workbench self.modify(Workbench(wb_name)) #</convenience functions> #<internals> def _restoreDetail(self, detail): p = self.data[detail.full_key] tv1, curr = self._value_after(detail, query_scene= p.mild_restore) if p.mild_restore: if self.data_requested[detail.full_key] != curr: #the value on the scene doesn't match what was requested through TV. User probably changed it. We don't want to mess it up. self._purge_milds(detail) return if tv1 is None: # no other TV has changed this detail later, apply to the scene detail.apply_data(p.data) else: #modify saved detail of higher TV tv1.data[detail.full_key].data = p.data def _purge_milds(self, detail): """_purge_milds(detail): wipes out detail from earlier TVs if the detail is mild-restore.""" if not self.is_in_stack: return seq_before, seq_after = self.stack.getSplitSequence(self) for tv in reversed(seq_before): if tv.has(detail): if tv.data[detail.full_key].mild_restore: tv.forgetDetail(detail) else: #hard-restoring value encountered, stop break def _change(self): '''to be called whenever anything is done that is to be restored later.''' if self.state == S_EMPTY: self.state = S_ACTIVE if self.state == S_RESTORED: Wrn("Attempting to use a TV that has been restored. There must be a problem with code.") self.tv_redo = None def _value_after(self, detail, query_scene = False): '''_value_current(detail): returns (tv, detail1). SceneDetail instance holds "current" value of scene detail (current from the context of this TV; i.e. either the current scene status, or the saved state from upper TVs). 
If no upper TV has saved the detail value, returns either (None, None), or (None, detail1) if query_scene is True, where detail1 holds value from the scene.''' def scene_value(): if query_scene: cpy = copy(detail) cpy.data = cpy.scene_value() return (None, cpy) else: return (None, None) if self.is_in_stack: va = self.stack.value_after(self, detail) if va is None: return scene_value() else: return va else: return scene_value() def _3D_objects(self, doc_obj_or_list): """_3D_objects(doc_obj_or_list): returns list of objects that are in 3d view.""" from .ShowUtils import is3DObject if not hasattr(doc_obj_or_list, '__iter__'): doc_obj_or_list = [doc_obj_or_list] return [obj for obj in doc_obj_or_list if is3DObject(obj)] def __getstate__(self): return None def __setstate__(self, state): self._init_attrs()
lgpl-2.1
dillia23/code-dot-org
blockly-core/appengine/report.py
22
1677
"""Blockly Demo: Report Copyright 2012 Google Inc. http://blockly.googlecode.com/ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ """Store reports about code written by users. """ __author__ = "[email protected] (Ellen Spertus)" import cgi import logging from google.appengine.ext import db print "Content-type: text/plain\n" class Report(db.Model): identifier = db.FloatProperty() application = db.StringProperty() date = db.DateTimeProperty(auto_now_add=True) level = db.IntegerProperty() result = db.IntegerProperty() # StringProperty is limited to 500 characters, so use TextProperty. program = db.TextProperty() # Catch errors extracting form fields or converting to numeric types. # Let any other errors propagate up. try: forms = cgi.FieldStorage() identifier = float(forms["id"].value) application = forms["app"].value level = int(forms["level"].value) result = int(forms["result"].value) program = forms["program"].value row = Report(identifier = identifier, application = application, level = level, result = result, program = program) row.put() except ValueError, KeyError: logging.error("Unable to extract all form fields.")
apache-2.0
cheral/orange3
Orange/tests/sql/test_filter.py
11
32037
# Test methods with long descriptive names can omit docstrings # pylint: disable=missing-docstring import unittest from Orange.data.sql.table import SqlTable, SqlRowInstance from Orange.data import filter, domain from Orange.tests.sql.base import PostgresTest, sql_version, sql_test @sql_test class TestIsDefinedSql(PostgresTest): def setUp(self): self.data = [ [1, 2, 3, None, 'm'], [2, 3, 1, 4, 'f'], [None, None, None, None, None], [7, None, 3, None, 'f'], ] conn, self.table_name = self.create_sql_table(self.data) self.table = SqlTable(conn, self.table_name, inspect_values=True) def tearDown(self): self.drop_sql_table(self.table_name) def test_on_all_columns(self): filtered_data = filter.IsDefined()(self.table) correct_data = [row for row in self.data if all(row)] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_selected_columns(self): filtered_data = filter.IsDefined(columns=[0])(self.table) correct_data = [row for row in self.data if row[0]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_all_columns_negated(self): filtered_data = filter.IsDefined(negate=True)(self.table) correct_data = [row for row in self.data if not all(row)] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_selected_columns_negated(self): filtered_data = \ filter.IsDefined(negate=True, columns=[4])(self.table) correct_data = [row for row in self.data if not row[4]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_can_inherit_is_defined_filter(self): filtered_data = filter.IsDefined(columns=[1])(self.table) filtered_data = filtered_data[:, 4] correct_data = [[row[4]]for row in self.data if row[1]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) @sql_test class TestHasClass(PostgresTest): def setUp(self): self.data = [ [1, 2, 3, None, 'm'], [2, 3, 1, 4, 'f'], [None, None, None, None, None], [7, None, 3, None, 'f'], ] self.conn, self.table_name = self.create_sql_table(self.data) table = SqlTable(self.conn, self.table_name, inspect_values=True) variables = table.domain.variables new_table = table.copy() new_table.domain = domain.Domain(variables[:-1], variables[-1:]) self.table = new_table def tearDown(self): self.drop_sql_table(self.table_name) def test_has_class(self): filtered_data = filter.HasClass()(self.table) correct_data = [row for row in self.data if row[-1]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_negated(self): filtered_data = filter.HasClass(negate=True)(self.table) correct_data = [row for row in self.data if not row[-1]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) @sql_test class TestSameValueSql(PostgresTest): def setUp(self): self.data = [ [1, 2, 3, 'a', 'm'], [2, None, 1, 'a', 'f'], [None, 3, 1, 'b', None], [2, 2, 3, 'b', 'f'], ] self.conn, self.table_name = self.create_sql_table(self.data) self.table = SqlTable(self.conn, self.table_name, inspect_values=True) def tearDown(self): self.drop_sql_table(self.table_name) def test_on_continuous_attribute(self): filtered_data = filter.SameValue(0, 1)(self.table) correct_data = [row for row in self.data if row[0] == 1] self.assertEqual(len(filtered_data), len(correct_data)) 
self.assertSequenceEqual(filtered_data, correct_data) def test_on_continuous_attribute_with_unknowns(self): filtered_data = filter.SameValue(1, 2)(self.table) correct_data = [row for row in self.data if row[1] == 2] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_continuous_attribute_with_unknown_value(self): filtered_data = filter.SameValue(1, None)(self.table) correct_data = [row for row in self.data if row[1] is None] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_continuous_attribute_negated(self): filtered_data = filter.SameValue(0, 1, negate=True)(self.table) correct_data = [row for row in self.data if not row[0] == 1] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute(self): filtered_data = filter.SameValue(3, 'a')(self.table) correct_data = [row for row in self.data if row[3] == 'a'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute_with_unknown_value(self): filtered_data = filter.SameValue(4, None)(self.table) correct_data = [row for row in self.data if row[4] is None] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute_with_unknowns(self): filtered_data = filter.SameValue(4, 'm')(self.table) correct_data = [row for row in self.data if row[4] == 'm'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute_negated(self): filtered_data = filter.SameValue(3, 'a', negate=True)(self.table) correct_data = [row for row in self.data if not row[3] == 'a'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute_value_passed_as_int(self): values = self.table.domain[3].values filtered_data = filter.SameValue(3, 0, negate=True)(self.table) correct_data = [row for row in self.data if not row[3] == values[0]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_on_discrete_attribute_value_passed_as_float(self): values = self.table.domain[3].values filtered_data = filter.SameValue(3, 0., negate=True)(self.table) correct_data = [row for row in self.data if not row[3] == values[0]] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) @sql_test class TestValuesSql(PostgresTest): def setUp(self): self.data = [ [1, 2, 3, 'a', 'm'], [2, None, 1, 'a', 'f'], [None, 3, 1, 'b', None], [2, 2, 3, 'b', 'f'], ] conn, self.table_name = self.create_sql_table(self.data) self.table = SqlTable(conn, self.table_name, inspect_values=True) def tearDown(self): self.drop_sql_table(self.table_name) def test_values_filter_with_no_conditions(self): with self.assertRaises(ValueError): filtered_data = filter.Values([])(self.table) def test_discrete_value_filter(self): filtered_data = filter.Values(conditions=[ filter.FilterDiscrete(3, ['a']) ])(self.table) correct_data = [row for row in self.data if row[3] in ['a']] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_discrete_value_filter_with_multiple_values(self): filtered_data = filter.Values(conditions=[ 
filter.FilterDiscrete(3, ['a', 'b']) ])(self.table) correct_data = [row for row in self.data if row[3] in ['a', 'b']] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_discrete_value_filter_with_None(self): filtered_data = filter.Values(conditions=[ filter.FilterDiscrete(3, None) ])(self.table) correct_data = [row for row in self.data if row[3] is not None] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.Equal, 1) ])(self.table) correct_data = [row for row in self.data if row[0] == 1] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_not_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.NotEqual, 1) ])(self.table) correct_data = [row for row in self.data if row[0] != 1] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_less(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.Less, 2) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and row[0] < 2] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_less_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.LessEqual, 2) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and row[0] <= 2] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_greater(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.Greater, 1) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and row[0] > 1] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_greater_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.GreaterEqual, 1) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and row[0] >= 1] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_between(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.Between, 1, 2) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and 1 <= row[0] <= 2] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_outside(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(0, filter.FilterContinuous.Outside, 2, 3) ])(self.table) correct_data = [row for row in self.data if row[0] is not None and not 2 <= row[0] <= 3] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_continuous_value_filter_isdefined(self): filtered_data = filter.Values(conditions=[ filter.FilterContinuous(1, filter.FilterContinuous.IsDefined) 
])(self.table) correct_data = [row for row in self.data if row[1] is not None] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) @sql_test class TestFilterStringSql(PostgresTest): def setUp(self): self.data = [ [w] for w in "Lorem ipsum dolor sit amet, consectetur adipiscing" "elit. Vestibulum vel dolor nulla. Etiam elit lectus, mollis nec" "mattis sed, pellentesque in turpis. Vivamus non nisi dolor. Etiam" "lacinia dictum purus, in ullamcorper ante vulputate sed. Nullam" "congue blandit elementum. Donec blandit laoreet posuere. Proin" "quis augue eget tortor posuere mollis. Fusce vestibulum bibendum" "neque at convallis. Donec iaculis risus volutpat malesuada" "vehicula. Ut cursus tempor massa vulputate lacinia. Pellentesque" "eu tortor sed diam placerat porttitor et volutpat risus. In" "vulputate rutrum lacus ac sagittis. Suspendisse interdum luctus" "sem auctor commodo.".split(' ')] + [[None], [None]] self.conn, self.table_name = self.create_sql_table(self.data) self.table = SqlTable(self.conn, self.table_name) def tearDown(self): self.drop_sql_table(self.table_name) def test_filter_string_is_defined(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.IsDefined) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Equal, 'in') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] == 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_equal_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Equal, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] == 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_equal_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Equal, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] == 'Donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_not_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.NotEqual, 'in') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] != 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_not_equal_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.NotEqual, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] != 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_not_equal_case_insensitive_data(self): filtered_data = 
filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.NotEqual, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] != 'Donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Less, 'A') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0] < 'A'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Less, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() < 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Less, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() < 'donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.LessEqual, 'A') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0] <= 'A'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less_equal_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.LessEqual, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() <= 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_less_equal_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.LessEqual, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() <= 'donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Greater, 'volutpat') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0] > 'volutpat'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Greater, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is 
not None and row[0].lower() > 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Greater, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() > 'donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater_equal(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.GreaterEqual, 'volutpat') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0] >= 'volutpat'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater_equal_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.GreaterEqual, 'In', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() >= 'in'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_greater_equal_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.GreaterEqual, 'donec', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower() >= 'donec'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_between(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Between, 'a', 'c') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'a' <= row[0] <= 'c'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_between_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Between, 'I', 'O', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'i' < row[0].lower() <= 'o'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_between_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Between, 'i', 'O', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'i' <= row[0].lower() <= 'o'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_contains(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Contains, 'et') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'et' in row[0]] self.assertEqual(len(filtered_data), len(correct_data)) 
self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_contains_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Contains, 'eT', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'et' in row[0].lower()] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_contains_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Contains, 'do', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and 'do' in row[0].lower()] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_outside(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Outside, 'am', 'di') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and not 'am' < row[0] < 'di'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_outside_case_insensitive(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.Outside, 'd', 'k', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and not 'd' < row[0].lower() < 'k'] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_starts_with(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.StartsWith, 'D') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].startswith('D')] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_starts_with_case_insensitive(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.StartsWith, 'D', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower().startswith('d')] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_ends_with(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.EndsWith, 's') ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].endswith('s')] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_ends_with_case_insensitive(self): filtered_data = filter.Values(conditions=[ filter.FilterString(-1, filter.FilterString.EndsWith, 'S', case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] is not None and row[0].lower().endswith('s')] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_list(self): filtered_data = filter.Values(conditions=[ 
filter.FilterStringList(-1, ['et', 'in']) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] in ['et', 'in']] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_list_case_insensitive_value(self): filtered_data = filter.Values(conditions=[ filter.FilterStringList(-1, ['Et', 'In'], case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] in ['et', 'in']] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data) def test_filter_string_list_case_insensitive_data(self): filtered_data = filter.Values(conditions=[ filter.FilterStringList(-1, ['donec'], case_sensitive=False) ])(self.table) correct_data = [SqlRowInstance(filtered_data.domain, row) for row in self.data if row[0] in ['Donec']] self.assertEqual(len(filtered_data), len(correct_data)) self.assertSequenceEqual(filtered_data, correct_data)
bsd-2-clause
laurentgo/pants
src/python/pants/java/nailgun_executor.py
5
11618
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import hashlib import logging import os import re import select import threading import time from six import string_types from twitter.common.collections import maybe_list from pants.base.build_environment import get_buildroot from pants.java.executor import Executor, SubprocessExecutor from pants.java.nailgun_client import NailgunClient from pants.pantsd.process_manager import ProcessGroup, ProcessManager from pants.util.dirutil import safe_open logger = logging.getLogger(__name__) class NailgunProcessGroup(ProcessGroup): _NAILGUN_KILL_LOCK = threading.Lock() def __init__(self): ProcessGroup.__init__(self, name='nailgun') # TODO: this should enumerate the .pids dir first, then fallback to ps enumeration (& warn). def _iter_nailgun_instances(self, everywhere=False): def predicate(proc): if proc.name() == NailgunExecutor._PROCESS_NAME: if not everywhere: return NailgunExecutor._PANTS_NG_ARG in proc.cmdline() else: return any(arg.startswith(NailgunExecutor._PANTS_NG_ARG_PREFIX) for arg in proc.cmdline()) return self.iter_instances(predicate) def killall(self, everywhere=False): """Kills all nailgun servers started by pants. :param bool everywhere: If ``True``, kills all pants-started nailguns on this machine; otherwise restricts the nailguns killed to those started for the current build root. """ with self._NAILGUN_KILL_LOCK: for proc in self._iter_nailgun_instances(everywhere): logger.info('killing nailgun server pid={pid}'.format(pid=proc.pid)) proc.terminate() # TODO: Once we integrate standard logging into our reporting framework, we can consider making # some of the log.debug() below into log.info(). Right now it just looks wrong on the console. class NailgunExecutor(Executor, ProcessManager): """Executes java programs by launching them in nailgun server. If a nailgun is not available for a given set of jvm args and classpath, one is launched and re-used for the given jvm args and classpath on subsequent runs. """ # 'NGServer 0.9.1 started on 127.0.0.1, port 53785.' _NG_PORT_REGEX = re.compile(r'.*\s+port\s+(\d+)\.$') # Used to identify if we own a given nailgun server. 
_PANTS_NG_ARG_PREFIX = b'-Dpants.buildroot' _PANTS_FINGERPRINT_ARG_PREFIX = b'-Dpants.nailgun.fingerprint' _PANTS_OWNER_ARG_PREFIX = b'-Dpants.nailgun.owner' _PANTS_NG_ARG = '='.join((_PANTS_NG_ARG_PREFIX, get_buildroot())) _NAILGUN_SPAWN_LOCK = threading.Lock() _SELECT_WAIT = 1 _PROCESS_NAME = b'java' def __init__(self, identity, workdir, nailgun_classpath, distribution=None, ins=None, connect_timeout=10, connect_attempts=5): Executor.__init__(self, distribution=distribution) ProcessManager.__init__(self, name=identity, process_name=self._PROCESS_NAME) if not isinstance(workdir, string_types): raise ValueError('Workdir must be a path string, not: {workdir}'.format(workdir=workdir)) self._identity = identity self._workdir = workdir self._ng_stdout = os.path.join(workdir, 'stdout') self._ng_stderr = os.path.join(workdir, 'stderr') self._nailgun_classpath = maybe_list(nailgun_classpath) self._ins = ins self._connect_timeout = connect_timeout self._connect_attempts = connect_attempts def __str__(self): return 'NailgunExecutor({identity}, dist={dist}, pid={pid} socket={socket})'.format( identity=self._identity, dist=self._distribution, pid=self.pid, socket=self.socket) def _parse_fingerprint(self, cmdline): fingerprints = [cmd.split('=')[1] for cmd in cmdline if cmd.startswith( self._PANTS_FINGERPRINT_ARG_PREFIX + '=')] return fingerprints[0] if fingerprints else None @property def fingerprint(self): """This provides the nailgun fingerprint of the running process otherwise None.""" if self.cmdline: return self._parse_fingerprint(self.cmdline) def _create_owner_arg(self, workdir): # Currently the owner is identified via the full path to the workdir. return '='.join((self._PANTS_OWNER_ARG_PREFIX, workdir)) def _create_fingerprint_arg(self, fingerprint): return '='.join((self._PANTS_FINGERPRINT_ARG_PREFIX, fingerprint)) @staticmethod def _fingerprint(jvm_options, classpath, java_version): """Compute a fingerprint for this invocation of a Java task. :param list jvm_options: JVM options passed to the java invocation :param list classpath: The -cp arguments passed to the java invocation :param Revision java_version: return value from Distribution.version() :return: a hexstring representing a fingerprint of the java invocation """ digest = hashlib.sha1() # TODO(John Sirois): hash classpath contents? [digest.update(item) for item in (''.join(sorted(jvm_options)), ''.join(sorted(classpath)), repr(java_version))] return digest.hexdigest() def _runner(self, classpath, main, jvm_options, args, cwd=None): """Runner factory. 
Called via Executor.execute().""" command = self._create_command(classpath, main, jvm_options, args) class Runner(self.Runner): @property def executor(this): return self @property def command(self): return list(command) def run(this, stdout=None, stderr=None, cwd=None): nailgun = self._get_nailgun_client(jvm_options, classpath, stdout, stderr) try: logger.debug('Executing via {ng_desc}: {cmd}'.format(ng_desc=nailgun, cmd=this.cmd)) return nailgun(main, cwd, *args) except nailgun.NailgunError as e: self.terminate() raise self.Error('Problem launching via {ng_desc} command {main} {args}: {msg}' .format(ng_desc=nailgun, main=main, args=' '.join(args), msg=e)) return Runner() def _check_nailgun_state(self, new_fingerprint): running = self.is_alive() updated = running and (self.fingerprint != new_fingerprint or self.cmd != self._distribution.java) logging.debug('Nailgun {nailgun} state: updated={up!s} running={run!s} fingerprint={old_fp} ' 'new_fingerprint={new_fp} distribution={old_dist} new_distribution={new_dist}' .format(nailgun=self._identity, up=updated, run=running, old_fp=self.fingerprint, new_fp=new_fingerprint, old_dist=self.cmd, new_dist=self._distribution.java)) return running, updated def _get_nailgun_client(self, jvm_options, classpath, stdout, stderr): """This (somewhat unfortunately) is the main entrypoint to this class via the Runner. It handles creation of the running nailgun server as well as creation of the client.""" classpath = self._nailgun_classpath + classpath new_fingerprint = self._fingerprint(jvm_options, classpath, self._distribution.version) with self._NAILGUN_SPAWN_LOCK: running, updated = self._check_nailgun_state(new_fingerprint) if running and updated: logger.debug('Found running nailgun server that needs updating, killing {server}' .format(server=self._identity)) self.terminate() if (not running) or (running and updated): return self._spawn_nailgun_server(new_fingerprint, jvm_options, classpath, stdout, stderr) return self._create_ngclient(self.socket, stdout, stderr) def _await_socket(self, timeout): """Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout.""" with safe_open(self._ng_stdout, 'r') as ng_stdout: start_time = time.time() while 1: readable, _, _ = select.select([ng_stdout], [], [], self._SELECT_WAIT) if readable: line = ng_stdout.readline() # TODO: address deadlock risk here. try: return self._NG_PORT_REGEX.match(line).group(1) except AttributeError: pass if (time.time() - start_time) > timeout: raise NailgunClient.NailgunError( 'Failed to read nailgun output after {sec} seconds!'.format(sec=timeout)) def _create_ngclient(self, port, stdout, stderr): return NailgunClient(port=port, ins=self._ins, out=stdout, err=stderr, workdir=get_buildroot()) def ensure_connectable(self, nailgun): """Ensures that a nailgun client is connectable or raises NailgunError.""" attempt_count = 0 while 1: if attempt_count > self._connect_attempts: logger.debug('Failed to connect to ng after {count} attempts' .format(count=self._connect_attempts)) raise NailgunClient.NailgunError('Failed to connect to ng server.') try: sock = nailgun.try_connect() if sock: logger.debug('Connected to ng server {server!r}'.format(server=self)) return finally: sock.close() attempt_count += 1 time.sleep(self.WAIT_INTERVAL) def _spawn_nailgun_server(self, fingerprint, jvm_options, classpath, stdout, stderr): """Synchronously spawn a new nailgun server.""" # Truncate the nailguns stdout & stderr. 
self._write_file(self._ng_stdout, '') self._write_file(self._ng_stderr, '') jvm_options = jvm_options + [self._PANTS_NG_ARG, self._create_owner_arg(self._workdir), self._create_fingerprint_arg(fingerprint)] post_fork_child_opts = dict(fingerprint=fingerprint, jvm_options=jvm_options, classpath=classpath, stdout=stdout, stderr=stderr) logger.debug('Spawning nailgun server {i} with fingerprint={f}, jvm_options={j}, classpath={cp}' .format(i=self._identity, f=fingerprint, j=jvm_options, cp=classpath)) self.daemon_spawn(post_fork_child_opts=post_fork_child_opts) # Wait for and write the port information in the parent so we can bail on exception/timeout. self.await_pid(self._connect_timeout) self.write_socket(self._await_socket(self._connect_timeout)) logger.debug('Spawned nailgun server {i} with fingerprint={f}, pid={pid} port={port}' .format(i=self._identity, f=fingerprint, pid=self.pid, port=self.socket)) client = self._create_ngclient(self.socket, stdout, stderr) self.ensure_connectable(client) return client def post_fork_child(self, fingerprint, jvm_options, classpath, stdout, stderr): """Post-fork() child callback for ProcessManager.daemon_spawn().""" java = SubprocessExecutor(self._distribution) subproc = java.spawn(classpath=classpath, main='com.martiansoftware.nailgun.NGServer', jvm_options=jvm_options, args=[':0'], stdin=safe_open('/dev/null', 'r'), stdout=safe_open(self._ng_stdout, 'w'), stderr=safe_open(self._ng_stderr, 'w'), close_fds=True) self.write_pid(subproc.pid)
apache-2.0
advancedplotting/aplot
python/plotserv/api_annotations.py
1
8009
# Copyright (c) 2014-2015, Heliosphere Research LLC # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """ Handles VIs in "api_annotations". """ import numpy as np from matplotlib import pyplot as plt from .core import resource from .terminals import remove_none from . import filters from . import errors @resource('text') def text(ctx, a): """ Display text on the plot """ plotid = a.plotid() x = a.float('x') y = a.float('y') s = a.string('s') relative = a.bool('coordinates') textprops = a.text() display = a.display() ctx.set(plotid) ax = plt.gca() # None-finite values here mean we skip the plot if x is None or y is None: return k = textprops._k() k.update(display._k()) k['clip_on'] = True if relative: k['transform'] = ax.transAxes remove_none(k) plt.text(x, y, s, **k) @resource('hline') def hline(ctx, a): """ Plot a horizontal line """ plotid = a.plotid() y = a.float('y') xmin = a.float('xmin') xmax = a.float('xmax') line = a.line() display = a.display() ctx.set(plotid) ctx.fail_if_polar() # Non-finite value provided if y is None: return k = { 'xmin': xmin, 'xmax': xmax, 'linewidth': line.width, 'linestyle': line.style, 'color': line.color if line.color is not None else 'k', } k.update(display._k()) remove_none(k) plt.axhline(y, **k) @resource('vline') def vline(ctx, a): """ Plot a vertical line """ plotid = a.plotid() x = a.float('x') ymin = a.float('ymin') ymax = a.float('ymax') line = a.line() display = a.display() ctx.set(plotid) ctx.fail_if_polar() # Non-finite value provided if x is None: return k = { 'ymin': ymin, 'ymax': ymax, 'linewidth': line.width, 'linestyle': line.style, 'color': line.color if line.color is not None else 'k', } k.update(display._k()) remove_none(k) plt.axvline(x, **k) @resource('colorbar') def colorbar(ctx, a): """ Display a colorbar """ plotid = a.plotid() label = a.string('label') ticks = a.dbl_1d('ticks') ticklabels = a.string_1d('ticklabels') ctx.set(plotid) # If no colormapped object has been plotted, MPL complains. # We permit this, and simply don't add the colorbar. 
if ctx.mappable is None: return c = plt.colorbar(ctx.mappable) # Don't bother setting an empty label if len(label) > 0: c.set_label(label) # Both specified if len(ticks) > 0 and len(ticklabels) > 0: ticks, ticklabels = filters.filter_1d(ticks, ticklabels) c.set_ticks(ticks) c.set_ticklabels(ticklabels) # Just ticks specified elif len(ticks) > 0: ticks = ticks[np.isfinite(ticks)] c.set_ticks(ticks) # Just ticklabels specified else: # Providing zero-length "ticks" array invokes auto-ticking, in which # case any ticklabels are ignored. pass @resource('legend') def legend(ctx, a): """ Represents Legend.vi. Note that there is no Positions enum on the Python side; the MPL values are hard-coded into the LabView control. """ POSITIONS = { 0: 0, 1: 1, 2: 9, 3: 2, 4: 6, 5: 3, 6: 8, 7: 4, 8: 7, 9: 10 } plotid = a.plotid() position = a.enum('position', POSITIONS) ctx.set(plotid) k = {'loc': position, 'fontsize': 'medium'} remove_none(k) if len(ctx.legend_entries) > 0: objects, labels = zip(*ctx.legend_entries) plt.legend(objects, labels, **k) @resource('label') def label(ctx, a): """ Title, X axis and Y axis labels. """ LOCATIONS = {0: 'title', 1: 'xlabel', 2: 'ylabel'} plotid = a.plotid() location = a.enum('kind', LOCATIONS) label = a.string('label') text = a.text() ctx.set(plotid) k = text._k() if location == 'title': plt.title(label, **k) elif location == 'xlabel': plt.xlabel(label, **k) elif location == 'ylabel': ctx.fail_if_polar() plt.ylabel(label, **k) else: pass @resource('circle') def circle(ctx, a): """ Draw a circle on a rectangular plot """ plotid = a.plotid() x = a.float('x') y = a.float('y') radius = a.float('radius') color = a.color('color') line = a.line() display = a.display() f = ctx.set(plotid) ctx.fail_if_polar() ctx.fail_if_log_symlog() # Like Text.vi, if any critical input is Nan we do nothing if x is None or y is None or radius is None: return # Catch this before MPL complains if radius <= 0: return k = { 'edgecolor': line.color, 'linestyle': line.style, 'linewidth': line.width, 'facecolor': color if color is not None else '#bbbbbb', } k.update(display._k()) remove_none(k) c = plt.Circle((x,y), radius, **k) f.gca().add_artist(c) @resource('rectangle') def rectangle(ctx, a): """ Draw a rectangle """ plotid = a.plotid() x = a.float('x') y = a.float('y') width = a.float('width') height = a.float('height') color = a.color('color') line = a.line() display = a.display() f = ctx.set(plotid) ctx.fail_if_symlog() # Like Text.vi, if any critical input is Nan we do nothing if x is None or y is None or width is None or height is None: return if width == 0 or height == 0: return k = { 'edgecolor': line.color, 'linestyle': line.style, 'linewidth': line.width, 'facecolor': color if color is not None else '#bbbbbb', } k.update(display._k()) remove_none(k) r = plt.Rectangle((x,y), width, height, **k) f.gca().add_artist(r)
bsd-3-clause
waseem18/oh-mainline
vendor/packages/gdata/src/gdata/blogger/data.py
61
4551
#!/usr/bin/env python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Data model classes for parsing and generating XML for the Blogger API.""" __author__ = '[email protected] (Jeff Scudder)' import re import urlparse import atom.core import gdata.data LABEL_SCHEME = 'http://www.blogger.com/atom/ns#' THR_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s' BLOG_NAME_PATTERN = re.compile('(http://)(\w*)') BLOG_ID_PATTERN = re.compile('(tag:blogger.com,1999:blog-)(\w*)') BLOG_ID2_PATTERN = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)') POST_ID_PATTERN = re.compile( '(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)') PAGE_ID_PATTERN = re.compile( '(tag:blogger.com,1999:blog-)(\w*)(.page-)(\w*)') COMMENT_ID_PATTERN = re.compile('.*-(\w*)$') class BloggerEntry(gdata.data.GDEntry): """Adds convenience methods inherited by all Blogger entries.""" def get_blog_id(self): """Extracts the Blogger id of this blog. This method is useful when contructing URLs by hand. The blog id is often used in blogger operation URLs. This should not be confused with the id member of a BloggerBlog. The id element is the Atom id XML element. The blog id which this method returns is a part of the Atom id. Returns: The blog's unique id as a string. """ if self.id.text: match = BLOG_ID_PATTERN.match(self.id.text) if match: return match.group(2) else: return BLOG_ID2_PATTERN.match(self.id.text).group(2) return None GetBlogId = get_blog_id def get_blog_name(self): """Finds the name of this blog as used in the 'alternate' URL. An alternate URL is in the form 'http://blogName.blogspot.com/'. For an entry representing the above example, this method would return 'blogName'. Returns: The blog's URL name component as a string. """ for link in self.link: if link.rel == 'alternate': return urlparse.urlparse(link.href)[1].split(".", 1)[0] return None GetBlogName = get_blog_name class Blog(BloggerEntry): """Represents a blog which belongs to the user.""" class BlogFeed(gdata.data.GDFeed): entry = [Blog] class BlogPost(BloggerEntry): """Represents a single post on a blog.""" def add_label(self, label): """Adds a label to the blog post. The label is represented by an Atom category element, so this method is shorthand for appending a new atom.Category object. Args: label: str """ self.category.append(atom.data.Category(scheme=LABEL_SCHEME, term=label)) AddLabel = add_label def get_post_id(self): """Extracts the postID string from the entry's Atom id. Returns: A string of digits which identify this post within the blog. """ if self.id.text: return POST_ID_PATTERN.match(self.id.text).group(4) return None GetPostId = get_post_id class BlogPostFeed(gdata.data.GDFeed): entry = [BlogPost] class BlogPage(BloggerEntry): """Represents a single page on a blog.""" def get_page_id(self): """Extracts the pageID string from entry's Atom id. Returns: A string of digits which identify this post within the blog. 
""" if self.id.text: return PAGE_ID_PATTERN.match(self.id.text).group(4) return None GetPageId = get_page_id class BlogPageFeed(gdata.data.GDFeed): entry = [BlogPage] class InReplyTo(atom.core.XmlElement): _qname = THR_TEMPLATE % 'in-reply-to' href = 'href' ref = 'ref' source = 'source' type = 'type' class Comment(BloggerEntry): """Blog post comment entry in a feed listing comments on a post or blog.""" in_reply_to = InReplyTo def get_comment_id(self): """Extracts the commentID string from the entry's Atom id. Returns: A string of digits which identify this post within the blog. """ if self.id.text: return COMMENT_ID_PATTERN.match(self.id.text).group(1) return None GetCommentId = get_comment_id class CommentFeed(gdata.data.GDFeed): entry = [Comment]
agpl-3.0
henriquegemignani/randovania
randovania/gui/main_window.py
1
25113
import functools import json import logging import os import platform import subprocess from functools import partial from pathlib import Path from typing import Optional, List from PySide2 import QtCore, QtWidgets, QtGui from PySide2.QtCore import QUrl, Signal, Qt from qasync import asyncSlot from randovania import VERSION from randovania.game_description.resources.trick_resource_info import TrickResourceInfo from randovania.games.game import RandovaniaGame from randovania.gui.generated.main_window_ui import Ui_MainWindow from randovania.gui.lib import common_qt_lib, async_dialog, theme from randovania.gui.lib.trick_lib import used_tricks, difficulties_for_trick from randovania.gui.lib.window_manager import WindowManager from randovania.interface_common import update_checker from randovania.interface_common.enum_lib import iterate_enum from randovania.interface_common.options import Options from randovania.interface_common.preset_manager import PresetManager from randovania.layout.layout_description import LayoutDescription from randovania.layout.trick_level import LayoutTrickLevel from randovania.resolver import debug _DISABLE_VALIDATION_WARNING = """ <html><head/><body> <p>While it sometimes throws errors, the validation is what guarantees that your seed is completable.<br/> Do <span style=" font-weight:600;">not</span> disable if you're uncomfortable with possibly unbeatable seeds. </p><p align="center">Are you sure you want to disable validation?</p></body></html> """ def _update_label_on_show(label: QtWidgets.QLabel, text: str): def showEvent(_): if label._delayed_text is not None: label.setText(label._delayed_text) label._delayed_text = None label._delayed_text = text label.showEvent = showEvent class MainWindow(WindowManager, Ui_MainWindow): newer_version_signal = Signal(str, str) options_changed_signal = Signal() _is_preview_mode: bool = False menu_new_version: Optional[QtWidgets.QAction] = None _current_version_url: Optional[str] = None _options: Options _data_visualizer: Optional[QtWidgets.QWidget] = None _map_tracker: QtWidgets.QWidget _preset_manager: PresetManager GameDetailsSignal = Signal(LayoutDescription) InitPostShowSignal = Signal() @property def _tab_widget(self): return self.main_tab_widget @property def preset_manager(self) -> PresetManager: return self._preset_manager @property def main_window(self) -> QtWidgets.QMainWindow: return self @property def is_preview_mode(self) -> bool: return self._is_preview_mode def __init__(self, options: Options, preset_manager: PresetManager, network_client, preview: bool): super().__init__() self.setupUi(self) self.setWindowTitle("Randovania {}".format(VERSION)) self._is_preview_mode = preview self.setAcceptDrops(True) common_qt_lib.set_default_window_icon(self) # Remove all hardcoded link color about_document: QtGui.QTextDocument = self.about_text_browser.document() about_document.setHtml(about_document.toHtml().replace("color:#0000ff;", "")) self.browse_racetime_label.setText(self.browse_racetime_label.text().replace("color:#0000ff;", "")) self.intro_label.setText(self.intro_label.text().format(version=VERSION)) self._preset_manager = preset_manager self.network_client = network_client if preview: debug.set_level(2) # Signals self.newer_version_signal.connect(self.display_new_version) self.options_changed_signal.connect(self.on_options_changed) self.GameDetailsSignal.connect(self._open_game_details) self.InitPostShowSignal.connect(self.initialize_post_show) self.intro_play_now_button.clicked.connect(lambda: 
self.welcome_tab_widget.setCurrentWidget(self.tab_play)) self.open_faq_button.clicked.connect(self._open_faq) self.open_database_viewer_button.clicked.connect(partial(self._open_data_visualizer_for_game, RandovaniaGame.PRIME2)) for game in RandovaniaGame: self.hint_item_names_game_combo.addItem(game.long_name, game) self.hint_location_game_combo.addItem(game.long_name, game) self.hint_item_names_game_combo.currentIndexChanged.connect(self._update_hints_text) self.hint_location_game_combo.currentIndexChanged.connect(self._update_hint_locations) self.import_permalink_button.clicked.connect(self._import_permalink) self.import_game_file_button.clicked.connect(self._import_spoiler_log) self.browse_racetime_button.clicked.connect(self._browse_racetime) self.create_new_seed_button.clicked.connect( lambda: self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed)) # Menu Bar for action, game in ((self.menu_action_prime_1_data_visualizer, RandovaniaGame.PRIME1), (self.menu_action_prime_2_data_visualizer, RandovaniaGame.PRIME2), (self.menu_action_prime_3_data_visualizer, RandovaniaGame.PRIME3)): action.triggered.connect(partial(self._open_data_visualizer_for_game, game)) for action, game in ((self.menu_action_edit_prime_1, RandovaniaGame.PRIME1), (self.menu_action_edit_prime_2, RandovaniaGame.PRIME2), (self.menu_action_edit_prime_3, RandovaniaGame.PRIME3)): action.triggered.connect(partial(self._open_data_editor_for_game, game)) self.menu_action_item_tracker.triggered.connect(self._open_item_tracker) self.menu_action_map_tracker.triggered.connect(self._on_menu_action_map_tracker) self.menu_action_edit_existing_database.triggered.connect(self._open_data_editor_prompt) self.menu_action_validate_seed_after.triggered.connect(self._on_validate_seed_change) self.menu_action_timeout_generation_after_a_time_limit.triggered.connect(self._on_generate_time_limit_change) self.menu_action_dark_mode.triggered.connect(self._on_menu_action_dark_mode) self.menu_action_open_auto_tracker.triggered.connect(self._open_auto_tracker) self.menu_action_previously_generated_games.triggered.connect(self._on_menu_action_previously_generated_games) self.menu_action_layout_editor.triggered.connect(self._on_menu_action_layout_editor) self.menu_prime_1_trick_details.aboutToShow.connect(self._create_trick_details_prime_1) self.menu_prime_2_trick_details.aboutToShow.connect(self._create_trick_details_prime_2) self.menu_prime_3_trick_details.aboutToShow.connect(self._create_trick_details_prime_3) # Setting this event only now, so all options changed trigger only once options.on_options_changed = self.options_changed_signal.emit self._options = options self.main_tab_widget.setCurrentIndex(0) def closeEvent(self, event): self.generate_seed_tab.stop_background_process() super().closeEvent(event) def dragEnterEvent(self, event: QtGui.QDragEnterEvent): from randovania.layout.preset_migration import VersionedPreset valid_extensions = [ LayoutDescription.file_extension(), VersionedPreset.file_extension(), ] valid_extensions_with_dot = { f".{extension}" for extension in valid_extensions } for url in event.mimeData().urls(): ext = os.path.splitext(url.toLocalFile())[1] if ext in valid_extensions_with_dot: event.acceptProposedAction() return def dropEvent(self, event: QtGui.QDropEvent): from randovania.layout.preset_migration import VersionedPreset for url in event.mimeData().urls(): path = Path(url.toLocalFile()) if path.suffix == f".{LayoutDescription.file_extension()}": self.open_game_details(LayoutDescription.from_file(path)) return 
elif path.suffix == f".{VersionedPreset.file_extension()}": self.main_tab_widget.setCurrentWidget(self.welcome_tab) self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed) self.generate_seed_tab.import_preset_file(path) return def showEvent(self, event: QtGui.QShowEvent): self.InitPostShowSignal.emit() # Delayed Initialization @asyncSlot() async def initialize_post_show(self): self.InitPostShowSignal.disconnect(self.initialize_post_show) logging.info("Will initialize things in post show") await self._initialize_post_show_body() logging.info("Finished initializing post show") async def _initialize_post_show_body(self): logging.info("Will load OnlineInteractions") from randovania.gui.main_online_interaction import OnlineInteractions logging.info("Creating OnlineInteractions...") self.online_interactions = OnlineInteractions(self, self.preset_manager, self.network_client, self, self._options) logging.info("Will load GenerateSeedTab") from randovania.gui.generate_seed_tab import GenerateSeedTab logging.info("Creating GenerateSeedTab...") self.generate_seed_tab = GenerateSeedTab(self, self, self._options) logging.info("Running GenerateSeedTab.setup_ui") self.generate_seed_tab.setup_ui() # Update hints text logging.info("Will _update_hints_text") self._update_hints_text() logging.info("Will hide hint locations combo") self.hint_location_game_combo.setVisible(False) self.hint_location_game_combo.setCurrentIndex(1) logging.info("Will update for modified options") with self._options: self.on_options_changed() def _update_hints_text(self): from randovania.gui.lib import hints_text hints_text.update_hints_text(self.hint_item_names_game_combo.currentData(), self.hint_item_names_tree_widget) def _update_hint_locations(self): from randovania.gui.lib import hints_text hints_text.update_hint_locations(self.hint_location_game_combo.currentData(), self.hint_tree_widget) # Generate Seed def _open_faq(self): self.main_tab_widget.setCurrentWidget(self.help_tab) self.help_tab_widget.setCurrentWidget(self.tab_faq) async def generate_seed_from_permalink(self, permalink): from randovania.interface_common.status_update_lib import ProgressUpdateCallable from randovania.gui.dialog.background_process_dialog import BackgroundProcessDialog def work(progress_update: ProgressUpdateCallable): from randovania.interface_common import simplified_patcher layout = simplified_patcher.generate_layout(progress_update=progress_update, permalink=permalink, options=self._options) progress_update(f"Success! 
(Seed hash: {layout.shareable_hash})", 1) return layout new_layout = await BackgroundProcessDialog.open_for_background_task(work, "Creating a game...") self.open_game_details(new_layout) @asyncSlot() async def _import_permalink(self): from randovania.gui.dialog.permalink_dialog import PermalinkDialog dialog = PermalinkDialog() result = await async_dialog.execute_dialog(dialog) if result == QtWidgets.QDialog.Accepted: permalink = dialog.get_permalink_from_field() await self.generate_seed_from_permalink(permalink) def _import_spoiler_log(self): json_path = common_qt_lib.prompt_user_for_input_game_log(self) if json_path is not None: layout = LayoutDescription.from_file(json_path) self.open_game_details(layout) @asyncSlot() async def _browse_racetime(self): from randovania.gui.dialog.racetime_browser_dialog import RacetimeBrowserDialog dialog = RacetimeBrowserDialog() if not await dialog.refresh(): return result = await async_dialog.execute_dialog(dialog) if result == QtWidgets.QDialog.Accepted: await self.generate_seed_from_permalink(dialog.permalink) def open_game_details(self, layout: LayoutDescription): self.GameDetailsSignal.emit(layout) def _open_game_details(self, layout: LayoutDescription): from randovania.gui.seed_details_window import SeedDetailsWindow details_window = SeedDetailsWindow(self, self._options) details_window.update_layout_description(layout) details_window.show() self.track_window(details_window) # Releases info async def request_new_data(self): from randovania.interface_common import github_releases_data await self._on_releases_data(await github_releases_data.get_releases()) async def _on_releases_data(self, releases: Optional[List[dict]]): import markdown current_version = update_checker.strict_current_version() last_changelog = self._options.last_changelog_displayed all_change_logs, new_change_logs, version_to_display = update_checker.versions_to_display_for_releases( current_version, last_changelog, releases) if version_to_display is not None: self.display_new_version(version_to_display) if all_change_logs: changelog_tab = QtWidgets.QWidget() changelog_tab.setObjectName("changelog_tab") changelog_tab_layout = QtWidgets.QVBoxLayout(changelog_tab) changelog_tab_layout.setContentsMargins(0, 0, 0, 0) changelog_tab_layout.setObjectName("changelog_tab_layout") changelog_scroll_area = QtWidgets.QScrollArea(changelog_tab) changelog_scroll_area.setWidgetResizable(True) changelog_scroll_area.setObjectName("changelog_scroll_area") changelog_scroll_contents = QtWidgets.QWidget() changelog_scroll_contents.setGeometry(QtCore.QRect(0, 0, 489, 337)) changelog_scroll_contents.setObjectName("changelog_scroll_contents") changelog_scroll_layout = QtWidgets.QVBoxLayout(changelog_scroll_contents) changelog_scroll_layout.setObjectName("changelog_scroll_layout") for entry in all_change_logs: changelog_label = QtWidgets.QLabel(changelog_scroll_contents) _update_label_on_show(changelog_label, markdown.markdown(entry)) changelog_label.setObjectName("changelog_label") changelog_label.setWordWrap(True) changelog_scroll_layout.addWidget(changelog_label) changelog_scroll_area.setWidget(changelog_scroll_contents) changelog_tab_layout.addWidget(changelog_scroll_area) self.help_tab_widget.addTab(changelog_tab, "Change Log") if new_change_logs: await async_dialog.message_box(self, QtWidgets.QMessageBox.Information, "What's new", markdown.markdown("\n".join(new_change_logs))) with self._options as options: options.last_changelog_displayed = current_version def display_new_version(self, version: 
update_checker.VersionDescription): if self.menu_new_version is None: self.menu_new_version = QtWidgets.QAction("", self) self.menu_new_version.triggered.connect(self.open_version_link) self.menu_bar.addAction(self.menu_new_version) self.menu_new_version.setText("New version available: {}".format(version.tag_name)) self._current_version_url = version.html_url def open_version_link(self): if self._current_version_url is None: raise RuntimeError("Called open_version_link, but _current_version_url is None") QtGui.QDesktopServices.openUrl(QUrl(self._current_version_url)) # Options def on_options_changed(self): self.menu_action_validate_seed_after.setChecked(self._options.advanced_validate_seed_after) self.menu_action_timeout_generation_after_a_time_limit.setChecked( self._options.advanced_timeout_during_generation) self.menu_action_dark_mode.setChecked(self._options.dark_mode) self.generate_seed_tab.on_options_changed(self._options) theme.set_dark_theme(self._options.dark_mode) # Menu Actions def _open_data_visualizer_for_game(self, game: RandovaniaGame): self.open_data_visualizer_at(None, None, game) def open_data_visualizer_at(self, world_name: Optional[str], area_name: Optional[str], game: RandovaniaGame = RandovaniaGame.PRIME2, ): from randovania.gui.data_editor import DataEditorWindow data_visualizer = DataEditorWindow.open_internal_data(game, False) self._data_visualizer = data_visualizer if world_name is not None: data_visualizer.focus_on_world(world_name) if area_name is not None: data_visualizer.focus_on_area(area_name) self._data_visualizer.show() def _open_data_editor_for_game(self, game: RandovaniaGame): from randovania.gui.data_editor import DataEditorWindow self._data_editor = DataEditorWindow.open_internal_data(game, True) self._data_editor.show() def _open_data_editor_prompt(self): from randovania.gui.data_editor import DataEditorWindow database_path = common_qt_lib.prompt_user_for_database_file(self) if database_path is None: return with database_path.open("r") as database_file: self._data_editor = DataEditorWindow(json.load(database_file), database_path, False, True) self._data_editor.show() @asyncSlot() async def _on_menu_action_map_tracker(self): dialog = QtWidgets.QInputDialog(self) dialog.setWindowTitle("Map Tracker") dialog.setLabelText("Select preset used for the tracker.") dialog.setComboBoxItems([preset.name for preset in self._preset_manager.all_presets]) dialog.setTextValue(self._options.selected_preset_name) result = await async_dialog.execute_dialog(dialog) if result == QtWidgets.QDialog.Accepted: preset = self._preset_manager.preset_for_name(dialog.textValue()) self.open_map_tracker(preset.get_preset().configuration) def open_map_tracker(self, configuration: "EchoesConfiguration"): from randovania.gui.tracker_window import TrackerWindow, InvalidLayoutForTracker try: self._map_tracker = TrackerWindow(self._options.tracker_files_path, configuration) except InvalidLayoutForTracker as e: QtWidgets.QMessageBox.critical( self, "Unsupported configuration for Tracker", str(e) ) return self._map_tracker.show() def _open_item_tracker(self): # Importing this at root level seems to crash linux tests :( from PySide2.QtWebEngineWidgets import QWebEngineView tracker_window = QtWidgets.QMainWindow() tracker_window.setWindowTitle("Item Tracker") tracker_window.resize(370, 380) web_view = QWebEngineView(tracker_window) tracker_window.setCentralWidget(web_view) self.web_view = web_view def update_window_icon(): tracker_window.setWindowIcon(web_view.icon()) 
web_view.iconChanged.connect(update_window_icon) web_view.load(QUrl("https://spaghettitoastbook.github.io/echoes/tracker/")) tracker_window.show() self._item_tracker_window = tracker_window # Difficulties stuff def _exec_trick_details(self, popup: "TrickDetailsPopup"): self._trick_details_popup = popup self._trick_details_popup.setWindowModality(Qt.WindowModal) self._trick_details_popup.open() def _open_trick_details_popup(self, game, trick: TrickResourceInfo, level: LayoutTrickLevel): from randovania.gui.dialog.trick_details_popup import TrickDetailsPopup self._exec_trick_details(TrickDetailsPopup(self, self, game, trick, level)) def _create_trick_details_prime_1(self): self.menu_prime_1_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_1) self._setup_difficulties_menu(RandovaniaGame.PRIME1, self.menu_prime_1_trick_details) def _create_trick_details_prime_2(self): self.menu_prime_2_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_2) self._setup_difficulties_menu(RandovaniaGame.PRIME2, self.menu_prime_2_trick_details) def _create_trick_details_prime_3(self): self.menu_prime_3_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_3) self._setup_difficulties_menu(RandovaniaGame.PRIME3, self.menu_prime_3_trick_details) def _setup_difficulties_menu(self, game: RandovaniaGame, menu: QtWidgets.QMenu): from randovania.game_description import default_database game = default_database.game_description_for(game) tricks_in_use = used_tricks(game) menu.clear() for trick in sorted(game.resource_database.trick, key=lambda _trick: _trick.long_name): if trick not in tricks_in_use: continue trick_menu = QtWidgets.QMenu(self) trick_menu.setTitle(trick.long_name) menu.addAction(trick_menu.menuAction()) used_difficulties = difficulties_for_trick(game, trick) for i, trick_level in enumerate(iterate_enum(LayoutTrickLevel)): if trick_level in used_difficulties: difficulty_action = QtWidgets.QAction(self) difficulty_action.setText(trick_level.long_name) trick_menu.addAction(difficulty_action) difficulty_action.triggered.connect( functools.partial(self._open_trick_details_popup, game, trick, trick_level)) # ========== @asyncSlot() async def _on_validate_seed_change(self): old_value = self._options.advanced_validate_seed_after new_value = self.menu_action_validate_seed_after.isChecked() if old_value and not new_value: box = QtWidgets.QMessageBox(self) box.setWindowTitle("Disable validation?") box.setText(_DISABLE_VALIDATION_WARNING) box.setIcon(QtWidgets.QMessageBox.Warning) box.setStandardButtons(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No) box.setDefaultButton(QtWidgets.QMessageBox.No) user_response = await async_dialog.execute_dialog(box) if user_response != QtWidgets.QMessageBox.Yes: self.menu_action_validate_seed_after.setChecked(True) return with self._options as options: options.advanced_validate_seed_after = new_value def _on_generate_time_limit_change(self): is_checked = self.menu_action_timeout_generation_after_a_time_limit.isChecked() with self._options as options: options.advanced_timeout_during_generation = is_checked def _on_menu_action_dark_mode(self): with self._options as options: options.dark_mode = self.menu_action_dark_mode.isChecked() def _open_auto_tracker(self): from randovania.gui.auto_tracker_window import AutoTrackerWindow self.auto_tracker_window = AutoTrackerWindow(common_qt_lib.get_game_connection(), self._options) self.auto_tracker_window.show() def _on_menu_action_previously_generated_games(self): path = 
self._options.data_dir.joinpath("game_history") try: if platform.system() == "Windows": os.startfile(path) elif platform.system() == "Darwin": subprocess.run(["open", path]) else: subprocess.run(["xdg-open", path]) except OSError: print("Exception thrown :)") box = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Information, "Game History", f"Previously generated games can be found at:\n{path}", QtWidgets.QMessageBox.Ok, self) box.setTextInteractionFlags(Qt.TextSelectableByMouse) box.show() def _on_menu_action_layout_editor(self): from randovania.gui.corruption_layout_editor import CorruptionLayoutEditor self.corruption_editor = CorruptionLayoutEditor() self.corruption_editor.show()
gpl-3.0
imron/scalyr-agent-2
scalyr_agent/third_party/requests/packages/__init__.py
838
1384
''' Debian and other distributions "unbundle" requests' vendored dependencies, and rewrite all imports to use the global versions of ``urllib3`` and ``chardet``. The problem with this is that not only requests itself imports those dependencies, but third-party code outside of the distros' control too. In reaction to these problems, the distro maintainers replaced ``requests.packages`` with a magical "stub module" that imports the correct modules. The implementations were varying in quality and all had severe problems. For example, a symlink (or hardlink) that links the correct modules into place introduces problems regarding object identity, since you now have two modules in `sys.modules` with the same API, but different identities:: requests.packages.urllib3 is not urllib3 With version ``2.5.2``, requests started to maintain its own stub, so that distro-specific breakage would be reduced to a minimum, even though the whole issue is not requests' fault in the first place. See https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull request. ''' from __future__ import absolute_import import sys try: from . import urllib3 except ImportError: import urllib3 sys.modules['%s.urllib3' % __name__] = urllib3 try: from . import chardet except ImportError: import chardet sys.modules['%s.chardet' % __name__] = chardet
apache-2.0
paran0ids0ul/infernal-twin
build/reportlab/src/reportlab/graphics/charts/doughnut.py
28
15381
#Copyright ReportLab Europe Ltd. 2000-2012 #see license.txt for license details #history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/doughnut.py # doughnut chart __version__=''' $Id$ ''' __doc__="""Doughnut chart Produces a circular chart like the doughnut charts produced by Excel. Can handle multiple series (which produce concentric 'rings' in the chart). """ import copy from math import sin, cos, pi from reportlab.lib import colors from reportlab.lib.validators import isColor, isNumber, isListOfNumbersOrNone,\ isListOfNumbers, isColorOrNone, isString,\ isListOfStringsOrNone, OneOf, SequenceOf,\ isBoolean, isListOfColors,\ isNoneOrListOfNoneOrStrings,\ isNoneOrListOfNoneOrNumbers,\ isNumberOrNone from reportlab.lib.attrmap import * from reportlab.pdfgen.canvas import Canvas from reportlab.graphics.shapes import Group, Drawing, Line, Rect, Polygon, Ellipse, \ Wedge, String, SolidShape, UserNode, STATE_DEFAULTS from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder from reportlab.graphics.charts.piecharts import AbstractPieChart, WedgeProperties, _addWedgeLabel, fixLabelOverlaps from reportlab.graphics.charts.textlabels import Label from reportlab.graphics.widgets.markers import Marker from functools import reduce class SectorProperties(WedgeProperties): """This holds descriptive information about the sectors in a doughnut chart. It is not to be confused with the 'sector itself'; this just holds a recipe for how to format one, and does not allow you to hack the angles. It can format a genuine Sector object for you with its format method. """ _attrMap = AttrMap(BASE=WedgeProperties, ) class Doughnut(AbstractPieChart): _attrMap = AttrMap( x = AttrMapValue(isNumber, desc='X position of the chart within its container.'), y = AttrMapValue(isNumber, desc='Y position of the chart within its container.'), width = AttrMapValue(isNumber, desc='width of doughnut bounding box. Need not be same as width.'), height = AttrMapValue(isNumber, desc='height of doughnut bounding box. 
Need not be same as height.'), data = AttrMapValue(None, desc='list of numbers defining sector sizes; need not sum to 1'), labels = AttrMapValue(isListOfStringsOrNone, desc="optional list of labels to use for each data point"), startAngle = AttrMapValue(isNumber, desc="angle of first slice; like the compass, 0 is due North"), direction = AttrMapValue(OneOf('clockwise', 'anticlockwise'), desc="'clockwise' or 'anticlockwise'"), slices = AttrMapValue(None, desc="collection of sector descriptor objects"), simpleLabels = AttrMapValue(isBoolean, desc="If true(default) use String not super duper WedgeLabel"), # advanced usage checkLabelOverlap = AttrMapValue(isBoolean, desc="If true check and attempt to fix\n standard label overlaps(default off)",advancedUsage=1), sideLabels = AttrMapValue(isBoolean, desc="If true attempt to make chart with labels along side and pointers", advancedUsage=1) ) def __init__(self): self.x = 0 self.y = 0 self.width = 100 self.height = 100 self.data = [1,1] self.labels = None # or list of strings self.startAngle = 90 self.direction = "clockwise" self.simpleLabels = 1 self.checkLabelOverlap = 0 self.sideLabels = 0 self.slices = TypedPropertyCollection(SectorProperties) self.slices[0].fillColor = colors.darkcyan self.slices[1].fillColor = colors.blueviolet self.slices[2].fillColor = colors.blue self.slices[3].fillColor = colors.cyan self.slices[4].fillColor = colors.pink self.slices[5].fillColor = colors.magenta self.slices[6].fillColor = colors.yellow def demo(self): d = Drawing(200, 100) dn = Doughnut() dn.x = 50 dn.y = 10 dn.width = 100 dn.height = 80 dn.data = [10,20,30,40,50,60] dn.labels = ['a','b','c','d','e','f'] dn.slices.strokeWidth=0.5 dn.slices[3].popout = 10 dn.slices[3].strokeWidth = 2 dn.slices[3].strokeDashArray = [2,2] dn.slices[3].labelRadius = 1.75 dn.slices[3].fontColor = colors.red dn.slices[0].fillColor = colors.darkcyan dn.slices[1].fillColor = colors.blueviolet dn.slices[2].fillColor = colors.blue dn.slices[3].fillColor = colors.cyan dn.slices[4].fillColor = colors.aquamarine dn.slices[5].fillColor = colors.cadetblue dn.slices[6].fillColor = colors.lightcoral d.add(dn) return d def normalizeData(self, data=None): from operator import add sum = float(reduce(add,data,0)) return abs(sum)>=1e-8 and list(map(lambda x,f=360./sum: f*x, data)) or len(data)*[0] def makeSectors(self): # normalize slice data if isinstance(self.data,(list,tuple)) and isinstance(self.data[0],(list,tuple)): #it's a nested list, more than one sequence normData = [] n = [] for l in self.data: t = self.normalizeData(l) normData.append(t) n.append(len(t)) self._seriesCount = max(n) else: normData = self.normalizeData(self.data) n = len(normData) self._seriesCount = n #labels checkLabelOverlap = self.checkLabelOverlap L = [] L_add = L.append if self.labels is None: labels = [] if not isinstance(n,(list,tuple)): labels = [''] * n else: for m in n: labels = list(labels) + [''] * m else: labels = self.labels #there's no point in raising errors for less than enough labels if #we silently create all for the extreme case of no labels. 
if not isinstance(n,(list,tuple)): i = n-len(labels) if i>0: labels = list(labels) + [''] * i else: tlab = 0 for m in n: tlab += m i = tlab-len(labels) if i>0: labels = list(labels) + [''] * i xradius = self.width/2.0 yradius = self.height/2.0 centerx = self.x + xradius centery = self.y + yradius if self.direction == "anticlockwise": whichWay = 1 else: whichWay = -1 g = Group() startAngle = self.startAngle #% 360 styleCount = len(self.slices) if isinstance(self.data[0],(list,tuple)): #multi-series doughnut ndata = len(self.data) yir = (yradius/2.5)/ndata xir = (xradius/2.5)/ndata ydr = (yradius-yir)/ndata xdr = (xradius-xir)/ndata for sn,series in enumerate(normData): for i,angle in enumerate(series): endAngle = (startAngle + (angle * whichWay)) #% 360 if abs(startAngle-endAngle)<1e-5: startAngle = endAngle continue if startAngle < endAngle: a1 = startAngle a2 = endAngle else: a1 = endAngle a2 = startAngle startAngle = endAngle #if we didn't use %stylecount here we'd end up with the later sectors #all having the default style sectorStyle = self.slices[i%styleCount] # is it a popout? cx, cy = centerx, centery if sectorStyle.popout != 0: # pop out the sector averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle * pi/180.0 popdistance = sectorStyle.popout cx = centerx + popdistance * cos(aveAngleRadians) cy = centery + popdistance * sin(aveAngleRadians) yr1 = yir+sn*ydr yr = yr1 + ydr xr1 = xir+sn*xdr xr = xr1 + xdr if isinstance(n,(list,tuple)): theSector = Wedge(cx, cy, xr, a1, a2, yradius=yr, radius1=xr1, yradius1=yr1) else: theSector = Wedge(cx, cy, xr, a1, a2, yradius=yr, radius1=xr1, yradius1=yr1, annular=True) theSector.fillColor = sectorStyle.fillColor theSector.strokeColor = sectorStyle.strokeColor theSector.strokeWidth = sectorStyle.strokeWidth theSector.strokeDashArray = sectorStyle.strokeDashArray g.add(theSector) if sn == 0: text = self.getSeriesName(i,'') if text: averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle*pi/180.0 labelRadius = sectorStyle.labelRadius rx = xradius*labelRadius ry = yradius*labelRadius labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius) labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius) l = _addWedgeLabel(self,text,averageAngle,labelX,labelY,sectorStyle) if checkLabelOverlap: l._origdata = { 'x': labelX, 'y':labelY, 'angle': averageAngle, 'rx': rx, 'ry':ry, 'cx':cx, 'cy':cy, 'bounds': l.getBounds(), } L_add(l) else: #single series doughnut yir = yradius/2.5 xir = xradius/2.5 for i,angle in enumerate(normData): endAngle = (startAngle + (angle * whichWay)) #% 360 if abs(startAngle-endAngle)<1e-5: startAngle = endAngle continue if startAngle < endAngle: a1 = startAngle a2 = endAngle else: a1 = endAngle a2 = startAngle startAngle = endAngle #if we didn't use %stylecount here we'd end up with the later sectors #all having the default style sectorStyle = self.slices[i%styleCount] # is it a popout? 
cx, cy = centerx, centery if sectorStyle.popout != 0: # pop out the sector averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle * pi/180.0 popdistance = sectorStyle.popout cx = centerx + popdistance * cos(aveAngleRadians) cy = centery + popdistance * sin(aveAngleRadians) if n > 1: theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=xir, yradius1=yir) elif n==1: theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=xir, yradius1=yir, annular=True) theSector.fillColor = sectorStyle.fillColor theSector.strokeColor = sectorStyle.strokeColor theSector.strokeWidth = sectorStyle.strokeWidth theSector.strokeDashArray = sectorStyle.strokeDashArray g.add(theSector) # now draw a label if labels[i] != "": averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle*pi/180.0 labelRadius = sectorStyle.labelRadius labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius) labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius) rx = xradius*labelRadius ry = yradius*labelRadius l = _addWedgeLabel(self,labels[i],averageAngle,labelX,labelY,sectorStyle) if checkLabelOverlap: l._origdata = { 'x': labelX, 'y':labelY, 'angle': averageAngle, 'rx': rx, 'ry':ry, 'cx':cx, 'cy':cy, 'bounds': l.getBounds(), } L_add(l) if checkLabelOverlap and L: fixLabelOverlaps(L) for l in L: g.add(l) return g def draw(self): g = Group() g.add(self.makeSectors()) return g def sample1(): "Make up something from the individual Sectors" d = Drawing(400, 400) g = Group() s1 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=0, endangledegrees=120, radius1=100) s1.fillColor=colors.red s1.strokeColor=None d.add(s1) s2 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=120, endangledegrees=240, radius1=100) s2.fillColor=colors.green s2.strokeColor=None d.add(s2) s3 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=240, endangledegrees=260, radius1=100) s3.fillColor=colors.blue s3.strokeColor=None d.add(s3) s4 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=260, endangledegrees=360, radius1=100) s4.fillColor=colors.gray s4.strokeColor=None d.add(s4) return d def sample2(): "Make a simple demo" d = Drawing(400, 400) dn = Doughnut() dn.x = 50 dn.y = 50 dn.width = 300 dn.height = 300 dn.data = [10,20,30,40,50,60] d.add(dn) return d def sample3(): "Make a more complex demo" d = Drawing(400, 400) dn = Doughnut() dn.x = 50 dn.y = 50 dn.width = 300 dn.height = 300 dn.data = [[10,20,30,40,50,60], [10,20,30,40]] dn.labels = ['a','b','c','d','e','f'] d.add(dn) return d def sample4(): "Make a more complex demo with Label Overlap fixing" d = Drawing(400, 400) dn = Doughnut() dn.x = 50 dn.y = 50 dn.width = 300 dn.height = 300 dn.data = [[10,20,30,40,50,60], [10,20,30,40]] dn.labels = ['a','b','c','d','e','f'] dn.checkLabelOverlap = True d.add(dn) return d if __name__=='__main__': from reportlab.graphics.renderPDF import drawToFile d = sample1() drawToFile(d, 'doughnut1.pdf') d = sample2() drawToFile(d, 'doughnut2.pdf') d = sample3() drawToFile(d, 'doughnut3.pdf')
gpl-3.0
andim/scipy
scipy/optimize/tests/test_zeros.py
59
1963
#!/usr/bin/env python from __future__ import division, print_function, absolute_import from math import sqrt, exp, sin, cos from numpy.testing import (TestCase, assert_almost_equal, assert_warns, assert_, run_module_suite, assert_allclose) from scipy.optimize import zeros as cc from scipy.optimize import zeros # Import testing parameters from scipy.optimize._tstutils import functions, fstrings class TestBasic(TestCase): def run_check(self, method, name): a = .5 b = sqrt(3) for function, fname in zip(functions, fstrings): zero, r = method(function, a, b, xtol=0.1e-12, full_output=True) assert_(r.converged) assert_almost_equal(zero, 1.0, decimal=12, err_msg='method %s, function %s' % (name, fname)) def test_bisect(self): self.run_check(cc.bisect, 'bisect') def test_ridder(self): self.run_check(cc.ridder, 'ridder') def test_brentq(self): self.run_check(cc.brentq, 'brentq') def test_brenth(self): self.run_check(cc.brenth, 'brenth') def test_newton(self): f1 = lambda x: x**2 - 2*x - 1 f1_1 = lambda x: 2*x - 2 f1_2 = lambda x: 2.0 + 0*x f2 = lambda x: exp(x) - cos(x) f2_1 = lambda x: exp(x) + sin(x) f2_2 = lambda x: exp(x) + cos(x) for f, f_1, f_2 in [(f1, f1_1, f1_2), (f2, f2_1, f2_2)]: x = zeros.newton(f, 3, tol=1e-6) assert_allclose(f(x), 0, atol=1e-6) x = zeros.newton(f, 3, fprime=f_1, tol=1e-6) assert_allclose(f(x), 0, atol=1e-6) x = zeros.newton(f, 3, fprime=f_1, fprime2=f_2, tol=1e-6) assert_allclose(f(x), 0, atol=1e-6) def test_deriv_zero_warning(self): func = lambda x: x**2 dfunc = lambda x: 2*x assert_warns(RuntimeWarning, cc.newton, func, 0.0, dfunc) if __name__ == '__main__': run_module_suite()
bsd-3-clause
pexip/os-kombu
kombu/utils/imports.py
5
2072
"""Import related utilities.""" from __future__ import absolute_import, unicode_literals import importlib import sys from kombu.five import reraise, string_t def symbol_by_name(name, aliases={}, imp=None, package=None, sep='.', default=None, **kwargs): """Get symbol by qualified name. The name should be the full dot-separated path to the class:: modulename.ClassName Example:: celery.concurrency.processes.TaskPool ^- class name or using ':' to separate module and symbol:: celery.concurrency.processes:TaskPool If `aliases` is provided, a dict containing short name/long name mappings, the name is looked up in the aliases first. Examples: >>> symbol_by_name('celery.concurrency.processes.TaskPool') <class 'celery.concurrency.processes.TaskPool'> >>> symbol_by_name('default', { ... 'default': 'celery.concurrency.processes.TaskPool'}) <class 'celery.concurrency.processes.TaskPool'> # Does not try to look up non-string names. >>> from celery.concurrency.processes import TaskPool >>> symbol_by_name(TaskPool) is TaskPool True """ if imp is None: imp = importlib.import_module if not isinstance(name, string_t): return name # already a class name = aliases.get(name) or name sep = ':' if ':' in name else sep module_name, _, cls_name = name.rpartition(sep) if not module_name: cls_name, module_name = None, package if package else cls_name try: try: module = imp(module_name, package=package, **kwargs) except ValueError as exc: reraise(ValueError, ValueError("Couldn't import {0!r}: {1}".format(name, exc)), sys.exc_info()[2]) return getattr(module, cls_name) if cls_name else module except (ImportError, AttributeError): if default is None: raise return default
bsd-3-clause
mancoast/CPythonPyc_test
cpython/213_test_re.py
5
12461
import sys sys.path = ['.'] + sys.path from test_support import verify, verbose, TestFailed import re import sys, os, traceback # Misc tests from Tim Peters' re.doc if verbose: print 'Running tests on re.search and re.match' try: verify(re.search('x*', 'axx').span(0) == (0, 0)) verify(re.search('x*', 'axx').span() == (0, 0)) verify(re.search('x+', 'axx').span(0) == (1, 3)) verify(re.search('x+', 'axx').span() == (1, 3)) verify(re.search('x', 'aaa') is None) except: raise TestFailed, "re.search" try: verify(re.match('a*', 'xxx').span(0) == (0, 0)) verify(re.match('a*', 'xxx').span() == (0, 0)) verify(re.match('x*', 'xxxa').span(0) == (0, 3)) verify(re.match('x*', 'xxxa').span() == (0, 3)) verify(re.match('a+', 'xxx') is None) except: raise TestFailed, "re.search" if verbose: print 'Running tests on re.sub' try: verify(re.sub("(?i)b+", "x", "bbbb BBBB") == 'x x') def bump_num(matchobj): int_value = int(matchobj.group(0)) return str(int_value + 1) verify(re.sub(r'\d+', bump_num, '08.2 -2 23x99y') == '9.3 -3 24x100y') verify(re.sub(r'\d+', bump_num, '08.2 -2 23x99y', 3) == '9.3 -3 23x99y') verify(re.sub('.', lambda m: r"\n", 'x') == '\\n') verify(re.sub('.', r"\n", 'x') == '\n') s = r"\1\1" verify(re.sub('(.)', s, 'x') == 'xx') verify(re.sub('(.)', re.escape(s), 'x') == s) verify(re.sub('(.)', lambda m: s, 'x') == s) verify(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx') == 'xxxx') verify(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx') == 'xxxx') verify(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx') == 'xxxx') verify(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx') == 'xxxx') verify(re.sub('a', r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D', 'a') == '\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D') verify(re.sub('a', '\t\n\v\r\f\a', 'a') == '\t\n\v\r\f\a') verify(re.sub('a', '\t\n\v\r\f\a', 'a') == (chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7))) verify(re.sub('^\s*', 'X', 'test') == 'Xtest') except AssertionError: raise TestFailed, "re.sub" try: verify(re.sub('a', 'b', 'aaaaa') == 'bbbbb') verify(re.sub('a', 'b', 'aaaaa', 1) == 'baaaa') except AssertionError: raise TestFailed, "qualified re.sub" if verbose: print 'Running tests on symbolic references' try: re.sub('(?P<a>x)', '\g<a', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)', '\g<', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)', '\g', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)', '\g<a a>', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)', '\g<1a1>', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)', '\g<ab>', 'xx') except IndexError, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)|(?P<b>y)', '\g<b>', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" try: re.sub('(?P<a>x)|(?P<b>y)', '\\2', 'xx') except re.error, reason: pass else: raise TestFailed, "symbolic reference" if verbose: print 'Running tests on re.subn' try: verify(re.subn("(?i)b+", "x", "bbbb BBBB") == ('x x', 2)) verify(re.subn("b+", "x", "bbbb BBBB") == ('x BBBB', 1)) verify(re.subn("b+", "x", "xyz") == ('xyz', 0)) verify(re.subn("b*", "x", "xyz") == ('xxxyxzx', 4)) verify(re.subn("b*", "x", "xyz", 2) == ('xxxyz', 2)) except AssertionError: raise TestFailed, "re.subn" if verbose: print 'Running tests on re.split' try: verify(re.split(":", ":a:b::c") == ['', 'a', 'b', '', 
'c']) verify(re.split(":*", ":a:b::c") == ['', 'a', 'b', 'c']) verify(re.split("(:*)", ":a:b::c") == ['', ':', 'a', ':', 'b', '::', 'c']) verify(re.split("(?::*)", ":a:b::c") == ['', 'a', 'b', 'c']) verify(re.split("(:)*", ":a:b::c") == ['', ':', 'a', ':', 'b', ':', 'c']) verify(re.split("([b:]+)", ":a:b::c") == ['', ':', 'a', ':b::', 'c']) verify(re.split("(b)|(:+)", ":a:b::c") == \ ['', None, ':', 'a', None, ':', '', 'b', None, '', None, '::', 'c'] ) verify(re.split("(?:b)|(?::+)", ":a:b::c") == ['', 'a', '', '', 'c']) except AssertionError: raise TestFailed, "re.split" try: verify(re.split(":", ":a:b::c", 2) == ['', 'a', 'b::c']) verify(re.split(':', 'a:b:c:d', 2) == ['a', 'b', 'c:d']) verify(re.split("(:)", ":a:b::c", 2) == ['', ':', 'a', ':', 'b::c']) verify(re.split("(:*)", ":a:b::c", 2) == ['', ':', 'a', ':', 'b::c']) except AssertionError: raise TestFailed, "qualified re.split" if verbose: print "Running tests on re.findall" try: verify(re.findall(":+", "abc") == []) verify(re.findall(":+", "a:b::c:::d") == [":", "::", ":::"]) verify(re.findall("(:+)", "a:b::c:::d") == [":", "::", ":::"]) verify(re.findall("(:)(:*)", "a:b::c:::d") == [(":", ""), (":", ":"), (":", "::")] ) except AssertionError: raise TestFailed, "re.findall" if verbose: print "Running tests on re.match" try: # No groups at all m = re.match('a', 'a') ; verify(m.groups() == ()) # A single group m = re.match('(a)', 'a') ; verify(m.groups() == ('a',)) pat = re.compile('((a)|(b))(c)?') verify(pat.match('a').groups() == ('a', 'a', None, None)) verify(pat.match('b').groups() == ('b', None, 'b', None)) verify(pat.match('ac').groups() == ('a', 'a', None, 'c')) verify(pat.match('bc').groups() == ('b', None, 'b', 'c')) verify(pat.match('bc').groups("") == ('b', "", 'b', 'c')) except AssertionError: raise TestFailed, "match .groups() method" try: # A single group m = re.match('(a)', 'a') verify(m.group(0) == 'a') verify(m.group(0) == 'a') verify(m.group(1) == 'a') verify(m.group(1, 1) == ('a', 'a')) pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?') verify(pat.match('a').group(1, 2, 3) == ('a', None, None)) verify(pat.match('b').group('a1', 'b2', 'c3') == (None, 'b', None)) verify(pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')) except AssertionError: raise TestFailed, "match .group() method" if verbose: print "Running tests on re.escape" try: p="" for i in range(0, 256): p = p + chr(i) verify(re.match(re.escape(chr(i)), chr(i)) is not None) verify(re.match(re.escape(chr(i)), chr(i)).span() == (0,1)) pat=re.compile( re.escape(p) ) verify(pat.match(p) is not None) verify(pat.match(p).span() == (0,256)) except AssertionError: raise TestFailed, "re.escape" if verbose: print 'Pickling a RegexObject instance' import pickle pat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)') s = pickle.dumps(pat) pat = pickle.loads(s) try: verify(re.I == re.IGNORECASE) verify(re.L == re.LOCALE) verify(re.M == re.MULTILINE) verify(re.S == re.DOTALL) verify(re.X == re.VERBOSE) except AssertionError: raise TestFailed, 're module constants' for flags in [re.I, re.M, re.X, re.S, re.L]: try: r = re.compile('^pattern$', flags) except: print 'Exception raised on flag', flags if verbose: print 'Test engine limitations' # Try nasty case that overflows the straightforward recursive # implementation of repeated groups. 
try: verify(re.match('(x)*', 50000*'x').span() == (0, 50000)) except RuntimeError, v: print v from re_tests import * if verbose: print 'Running re_tests test suite' else: # To save time, only run the first and last 10 tests #tests = tests[:10] + tests[-10:] pass for t in tests: sys.stdout.flush() pattern = s = outcome = repl = expected = None if len(t) == 5: pattern, s, outcome, repl, expected = t elif len(t) == 3: pattern, s, outcome = t else: raise ValueError, ('Test tuples should have 3 or 5 fields', t) try: obj = re.compile(pattern) except re.error: if outcome == SYNTAX_ERROR: pass # Expected a syntax error else: print '=== Syntax error:', t except KeyboardInterrupt: raise KeyboardInterrupt except: print '*** Unexpected error ***', t if verbose: traceback.print_exc(file=sys.stdout) else: try: result = obj.search(s) except re.error, msg: print '=== Unexpected exception', t, repr(msg) if outcome == SYNTAX_ERROR: # This should have been a syntax error; forget it. pass elif outcome == FAIL: if result is None: pass # No match, as expected else: print '=== Succeeded incorrectly', t elif outcome == SUCCEED: if result is not None: # Matched, as expected, so now we compute the # result string and compare it to our expected result. start, end = result.span(0) vardict={'found': result.group(0), 'groups': result.group(), 'flags': result.re.flags} for i in range(1, 100): try: gi = result.group(i) # Special hack because else the string concat fails: if gi is None: gi = "None" except IndexError: gi = "Error" vardict['g%d' % i] = gi for i in result.re.groupindex.keys(): try: gi = result.group(i) if gi is None: gi = "None" except IndexError: gi = "Error" vardict[i] = gi repl = eval(repl, vardict) if repl != expected: print '=== grouping error', t, print repr(repl) + ' should be ' + repr(expected) else: print '=== Failed incorrectly', t # Try the match on a unicode string, and check that it # still succeeds. try: result = obj.search(unicode(s, "latin-1")) if result is None: print '=== Fails on unicode match', t except NameError: continue # 1.5.2 except TypeError: continue # unicode test case # Try the match on a unicode pattern, and check that it # still succeeds. obj=re.compile(unicode(pattern, "latin-1")) result = obj.search(s) if result is None: print '=== Fails on unicode pattern match', t # Try the match with the search area limited to the extent # of the match and see if it still succeeds. \B will # break (because it won't match at the end or start of a # string), so we'll ignore patterns that feature it. if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \ and result is not None: obj = re.compile(pattern) result = obj.search(s, result.start(0), result.end(0) + 1) if result is None: print '=== Failed on range-limited match', t # Try the match with IGNORECASE enabled, and check that it # still succeeds. obj = re.compile(pattern, re.IGNORECASE) result = obj.search(s) if result is None: print '=== Fails on case-insensitive match', t # Try the match with LOCALE enabled, and check that it # still succeeds. obj = re.compile(pattern, re.LOCALE) result = obj.search(s) if result is None: print '=== Fails on locale-sensitive match', t # Try the match with UNICODE locale enabled, and check # that it still succeeds. obj = re.compile(pattern, re.UNICODE) result = obj.search(s) if result is None: print '=== Fails on unicode-sensitive match', t
gpl-3.0
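For readers skimming the test fragment above, the core re.split semantics it verifies still hold on current CPython; a tiny restatement using only the standard library (nothing project-specific):

import re

print(re.split(":", ":a:b::c"))              # ['', 'a', 'b', '', 'c']
print(re.split("(:)", ":a:b::c"))            # captured separators are kept in the result
print(re.split(":", ":a:b::c", maxsplit=2))  # ['', 'a', 'b::c']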
maltsev/LatexWebOffice
app/views/document.py
1
15983
# -*- coding: utf-8 -*- """ * Purpose : Dokument- und Projektverwaltung Schnittstelle * Creation Date : 19-11-2014 * Last Modified : Di 24 Feb 2015 15:46:51 CET * Author : mattis * Coauthors : christian, ingo, Kirill * Sprintnumber : 2, 5 * Backlog entry : TEK1, 3ED9, DOK8, DO14, KOL1 """ import os from django.contrib.auth.decorators import login_required from django.views.decorators.http import require_http_methods from django.shortcuts import render from django.views.static import serve import settings from app.common import util from app.common.constants import ERROR_MESSAGES from app.views import file, folder, project, template from app.models.projecttemplate import ProjectTemplate from app.models.file.file import File from app.models.file.texfile import TexFile from app.models.file.plaintextfile import PlainTextFile from app.models.file.pdf import PDF from app.models.project import Project from app.models.folder import Folder globalparas = { 'id': {'name': 'id', 'type': int}, 'content': {'name': 'content', 'type': str}, 'folderid': {'name': 'folderid', 'type': int}, 'name': {'name': 'name', 'type': str}, 'formatid': {'name': 'formatid', 'type': int}, # 'compilerid': {'name': 'compilerid', 'type': int}, 'forcecompile': {'name': 'forcecompile', 'type': int} } # dictionary mit verfügbaren Befehlen und den entsprechenden Aktionen # die entsprechenden Methoden befinden sich in: # '/app/views/project.py', '/app/views/file.py', '/app/views/folder.py' und '/app/views/collaboration.py' available_commands = { 'projectcreate': { 'command': project.projectCreate, 'parameters': [{'para': globalparas['name'], 'stringcheck': True}] }, 'projectclone': { 'command': project.projectClone, 'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']}, {'para': globalparas['name'], 'stringcheck': True}] }, 'projectrm': { 'command': project.projectRm, 'parameters': [{'para': globalparas['id'], 'type': Project}] }, 'projectrename': { 'command': project.projectRename, 'parameters': [{'para': globalparas['id'], 'type': Project}, {'para': globalparas['name'], 'stringcheck': True}] }, 'listprojects': { 'command': project.listProjects, 'parameters': [] }, 'importzip': { 'command': project.importZip, 'parameters': [] }, 'exportzip': { 'command': project.exportZip, 'parameters': [{'para': globalparas['id']}] }, 'inviteuser': { 'command': project.inviteUser, 'parameters': [{'para': globalparas['id'], 'type': Project}, {'para': globalparas['name'], 'stringcheck': True}] }, 'hasinvitedusers': { 'command': project.hasInvitedUsers, 'parameters': [{'para': globalparas['id'], 'type': Project}] }, 'listinvitedusers': { 'command': project.listInvitedUsers, 'parameters': [{'para': globalparas['id'], 'type': Project}] }, 'listunconfirmedcollaborativeprojects': { 'command': project.listUnconfirmedCollaborativeProjects, 'parameters': [] }, 'activatecollaboration': { 'command': project.activateCollaboration, 'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee']}] }, 'quitcollaboration': { 'command': project.quitCollaboration, 'parameters': [ {'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee', 'collaborator']}] }, 'cancelcollaboration': { 'command': project.cancelCollaboration, 'parameters': [{'para': globalparas['id'], 'type': Project}, {'para': globalparas['name'], 'stringcheck': True}] }, 'createtex': { 'command': file.createTexFile, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': 
['owner', 'collaborator']}, {'para': globalparas['name'], 'filenamecheck': True}] }, 'updatefile': { 'command': file.updateFile, 'parameters': [{'para': globalparas['id'], 'type': PlainTextFile, 'requirerights': ['owner', 'collaborator'], 'lockcheck': False}, {'para': globalparas['content']}] }, 'deletefile': { 'command': file.deleteFile, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}] }, 'renamefile': { 'command': file.renameFile, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}, {'para': globalparas['name'], 'filenamecheck': True}] }, 'movefile': { 'command': file.moveFile, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}, {'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}] }, 'uploadfiles': { 'command': file.uploadFiles, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}] }, 'downloadfile': { 'command': file.downloadFile, 'parameters': [{'para': globalparas['id']}] }, 'gettext': { 'command': file.getText, 'parameters': [{'para': globalparas['id'], 'type': PlainTextFile, 'requirerights': ['owner', 'collaborator']}] }, 'fileinfo': { 'command': file.fileInfo, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}] }, 'compile': { 'command': file.latexCompile, 'parameters': [{'para': globalparas['id'], 'type': TexFile, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}, {'para': globalparas['formatid']}, # {'para': globalparas['compilerid']}, {'para': globalparas['forcecompile']}] }, 'lockfile': { 'command': file.lockFile, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}] }, 'unlockfile': { 'command': file.unlockFile, 'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}] }, 'getlog': { 'command': file.getLog, 'parameters': [{'para': globalparas['id'], 'type': TexFile, 'requirerights': ['owner', 'collaborator']}] }, 'createdir': { 'command': folder.createDir, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}, {'para': globalparas['name'], 'stringcheck': True}] }, 'rmdir': { 'command': folder.rmDir, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}] }, 'renamedir': { 'command': folder.renameDir, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}, {'para': globalparas['name'], 'stringcheck': True}] }, 'movedir': { 'command': folder.moveDir, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator'], 'lockcheck': True}, {'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}] }, 'listfiles': { 'command': folder.listFiles, 'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}] }, 'template2project': { 'command': template.template2Project, 'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate}, {'para': globalparas['name'], 'stringcheck': True}] }, 'project2template': { 'command': template.project2Template, 'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']}, {'para': 
globalparas['name'], 'stringcheck': True}] }, 'templaterm': { 'command': template.templateRm, 'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate}] }, 'templaterename': { 'command': template.templateRename, 'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate}, {'para': globalparas['name'], 'stringcheck': True}] }, 'listtemplates': { 'command': template.listTemplates, 'parameters': [] } } available_commands_output = {} for key, value in available_commands.items(): parameters = [] for paras in value['parameters']: globalparainfo = (paras['para']).copy() value = {'para': globalparainfo} if globalparainfo.get('type'): del globalparainfo['type'] parameters.append(value) if key == 'uploadfiles' or key == 'importzip': parameters.append({'para': {'name': 'files'}}) available_commands_output.update({key: parameters}) @login_required def debug(request): return render(request, 'documentPoster.html') # Schnittstellenfunktion # bietet eine Schnittstelle zur Kommunikation zwischen Client und Server # liest den vom Client per POST Data übergebenen Befehl ein # und führt die entsprechende Methode aus @login_required @require_http_methods(['POST', 'GET']) def execute(request): if request.method == 'POST' and 'command' in request.POST: # hole den aktuellen Benutzer user = request.user # wenn der Schlüssel nicht gefunden wurde # gib Fehlermeldung zurück if request.POST['command'] not in available_commands: return util.jsonErrorResponse(ERROR_MESSAGES['COMMANDNOTFOUND'], request) args = [] # aktueller Befehl c = available_commands[request.POST['command']] # Parameter dieses Befehls paras = c['parameters'] # durchlaufe alle Parameter des Befehls for para in paras: # wenn der Parameter nicht gefunden wurde oder ein Parameter, welcher eine id angeben sollte # Zeichen enthält, die keine Zahlen sind, gib Fehlermeldung zurück if request.POST.get(para['para']['name']) is None: return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request) elif para['para']['type'] == int and (not request.POST.get(para['para']['name']).isdigit()): return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request) # sonst füge den Parameter zu der Argumentliste hinzu else: args.append(request.POST[para['para']['name']]) # Teste auf ungültige strings if para.get('stringcheck'): failstring, failurereturn = util.checkObjectForInvalidString( request.POST.get(para['para']['name']), request) if not failstring: return failurereturn elif para.get('filenamecheck'): failstring, failurereturn = util.checkFileForInvalidString( request.POST.get(para['para']['name']), request) if not failstring: return failurereturn # Teste, dass der User rechte auf das Objekt mit der angegebenen id # hat und diese existiert if para.get('type') and para['para']['type'] == int: objType = para.get('type') objId = request.POST.get(para['para']['name']) requireRights = para.get('requirerights', ['owner']) lockcheck = para.get('lockcheck', False) if objType == Project: rights, failurereturn = util.checkIfProjectExistsAndUserHasRights(objId, user, request, requireRights) if not rights: return failurereturn elif objType == Folder: rights, failurereturn = util.checkIfDirExistsAndUserHasRights(objId, user, request, requireRights, lockcheck) if not rights: return failurereturn elif objType == File: rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck, objecttype=File) if not rights: return failurereturn elif objType == TexFile: rights, 
failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck, objecttype=TexFile) if not rights: return failurereturn elif objType == PlainTextFile: rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck, objecttype=PlainTextFile) if not rights: return failurereturn elif objType == ProjectTemplate: # Überprüfe, ob Vorlage existiert und der User darauf Rechte hat emptystring, failurereturn = util.checkIfTemplateExistsAndUserHasRights(objId, user, request) if not emptystring: return failurereturn # führe den übergebenen Befehl aus return c['command'](request, user, *args) elif request.method == 'GET' and request.GET.get('command'): command = request.GET.get('command') pdfid = request.GET.get('id') texid = request.GET.get('texid') defaultpdfPath = filepath = os.path.join(settings.BASE_DIR, 'app', 'static', 'default.pdf') if (pdfid and not pdfid.isdigit()) or (texid and not texid.isdigit()): return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath)) if command == 'getpdf' and pdfid: requireRights = ['owner', 'collaborator'] rights, failurereturn = util.checkIfFileExistsAndUserHasRights(pdfid, request.user, request, requireRights, lockcheck=False, objecttype=PDF) if not rights: return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath)) return file.getPDF(request, request.user, pdfid=pdfid, default=defaultpdfPath) elif command == 'getpdf' and texid: requireRights = ['owner', 'collaborator'] rights, failurereturn = util.checkIfFileExistsAndUserHasRights(texid, request.user, request, requireRights, lockcheck=False, objecttype=TexFile) if not rights: return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath)) return file.getPDF(request, request.user, texid=texid, default=defaultpdfPath) return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % 'unknown', request)
gpl-3.0
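The execute() view above validates parameters against a registry before dispatching; a minimal standalone sketch of that pattern, with invented names (demo_commands, dispatch, project_create) and no Django dependency:

def project_create(user, name):
    # Stand-in handler; the real handlers live in app/views/project.py etc.
    return {'status': 'success', 'owner': user, 'name': name}

demo_commands = {
    'projectcreate': {
        'command': project_create,
        'parameters': [{'name': 'name', 'type': str}],
    },
}

def dispatch(user, post_data):
    command = post_data.get('command')
    if command not in demo_commands:
        return {'status': 'failure', 'reason': 'COMMANDNOTFOUND'}
    spec = demo_commands[command]
    args = []
    for para in spec['parameters']:
        value = post_data.get(para['name'])
        if value is None:
            return {'status': 'failure', 'reason': 'MISSINGPARAMETER'}
        if para['type'] is int and not str(value).isdigit():
            return {'status': 'failure', 'reason': 'MISSINGPARAMETER'}
        args.append(value)
    return spec['command'](user, *args)

print(dispatch('alice', {'command': 'projectcreate', 'name': 'thesis'}))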
DavidNorman/tensorflow
tensorflow/python/tpu/session_support.py
5
15060
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ====================================== """Operations for handling session logging and shutdown notifications.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import threading import time from google.protobuf import text_format from tensorflow.core.protobuf import config_pb2 from tensorflow.core.util import event_pb2 from tensorflow.python.client import session as session_lib from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.platform import tf_logging as logging from tensorflow.python.tpu.ops import tpu_ops from tensorflow.python.training import session_run_hook from tensorflow.python.training import training_util _WATCHDOG = None class CoordinatorResetError(errors.AbortedError): """Raised when the monitored session should reset.""" def __init__(self): errors.AbortedError.__init__( self, None, None, 'Resetting session loop due to worker shutdown.') def _clone_session(session, graph=None): return session_lib.Session( target=session.sess_str, config=session._config, # pylint: disable=protected-access graph=graph if graph else session.graph) class WorkerHeartbeatManager(object): """Manages the status/heartbeat monitor for a set of workers.""" def __init__(self, session, devices, heartbeat_ops, request_placeholder): """Construct a new WorkerHeartbeatManager. (Prefer using `WorkerHeartbeatManager.from_devices` when possible.) Args: session: `tf.compat.v1.Session`, session to use for heartbeat operations. devices: `list[string]` Set of devices to connect to. heartbeat_ops: `list[tf.Operation]` Heartbeat operations. request_placeholder: `tf.Placeholder[String]` Placeholder used to specify the WorkerHeartbeatRequest protocol buffer. """ self._session = session self._devices = devices self._ops = heartbeat_ops self._request_placeholder = request_placeholder @staticmethod def from_devices(session, devices): """Construct a heartbeat manager for the given devices.""" if not devices: logging.error('Trying to create heartbeat manager with no devices?') logging.info('Creating heartbeat manager for %s', devices) request_placeholder = array_ops.placeholder( name='worker_heartbeat_request', dtype=dtypes.string) heartbeat_ops = [] for device in devices: with ops.device(device): heartbeat_ops.append(tpu_ops.worker_heartbeat(request_placeholder)) return WorkerHeartbeatManager(session, devices, heartbeat_ops, request_placeholder) def num_workers(self): return len(self._devices) def configure(self, message): """Configure heartbeat manager for all devices. 
Args: message: `event_pb2.WorkerHeartbeatRequest` Returns: `None` """ logging.info('Configuring worker heartbeat: %s', text_format.MessageToString(message)) self._session.run(self._ops, {self._request_placeholder: message.SerializeToString()}) def ping(self, request=None, timeout_in_ms=5000): """Ping all workers, returning the parsed status results.""" if request is None: request = event_pb2.WorkerHeartbeatRequest() options = config_pb2.RunOptions(timeout_in_ms=timeout_in_ms) results = self._session.run( self._ops, feed_dict={self._request_placeholder: request.SerializeToString()}, options=options) parsed_results = [ event_pb2.WorkerHeartbeatResponse.FromString(res_pb) for res_pb in results ] logging.debug('Ping results: %s', parsed_results) return parsed_results def lame_workers(self): """Ping all workers, returning manager containing lame workers (or None).""" ping_results = self.ping() lame_workers = [] for ping_response, device, op in zip(ping_results, self._devices, self._ops): if ping_response.health_status != event_pb2.OK: lame_workers.append((device, op)) if not lame_workers: return None bad_devices, bad_ops = zip(*lame_workers) return WorkerHeartbeatManager(self._session, bad_devices, bad_ops, self._request_placeholder) def __repr__(self): return 'HeartbeatManager(%s)' % ','.join(self._devices) # Default timeout is set to allow other shutdown triggered operations (log # flushing etc) to finish before terminating the worker. def shutdown(self, wait_time_in_ms=60000, exit_code=None): """Shutdown all workers after `shutdown_timeout_secs`.""" logging.info('Shutting down %s.', self) req = event_pb2.WorkerHeartbeatRequest( watchdog_config=event_pb2.WatchdogConfig(timeout_ms=wait_time_in_ms), shutdown_mode=event_pb2.SHUTDOWN_AFTER_TIMEOUT, exit_code=event_pb2.RequestedExitCode( exit_code=exit_code) if exit_code is not None else None) self.configure(req) # Wait for workers to shutdown. sleep_sec = 10.0 + wait_time_in_ms / 1000 logging.info('Waiting %.2f seconds for worker shutdown.', sleep_sec) time.sleep(sleep_sec) def all_worker_devices(session): """Return a list of devices for each worker in the system.""" devices = session.list_devices() devices_that_support_heartbeats = [] for device in devices: name = device.name # Pick devices that have a TPU but target the attached CPU if ':TPU:0' in name and 'coordinator' not in name: devices_that_support_heartbeats.append(name.replace('TPU', 'CPU')) return devices_that_support_heartbeats class WatchdogManager(threading.Thread): """Configures worker watchdog timer and handles periodic pings. Usage: # Ping workers every minute, shutting down workers if they haven't received # a ping after 1 hour. watchdog_manager = WatchdogManager( ping_interval=60, shutdown_timeout=3600 ) # Use as a context manager, resetting watchdog on context exit: with watchdog_manager: session.run(...) # Or setup globally; watchdog will remain active until program exit. watchdog_manager.configure_and_run() """ def __init__(self, session, devices=None, ping_interval=60, shutdown_timeout=3600): """Initialize a watchdog manager. Args: session: Session connected to worker devices. A cloned session and graph will be created for managing worker pings. devices: Set of devices to monitor. If none, all workers will be monitored. ping_interval: Time, in seconds, between watchdog pings. shutdown_timeout: Time, in seconds, before watchdog timeout. 
""" threading.Thread.__init__(self) self.ping_interval = ping_interval self.shutdown_timeout = shutdown_timeout self.daemon = True self._config = session._config # pylint: disable=protected-access self._target = session.sess_str self._running = False self._devices = devices self._graph = None self._session = None self._worker_manager = None def _reset_manager(self): """Reset the graph, session and worker manager.""" self._graph = ops.Graph() self._session = session_lib.Session( target=self._target, graph=self._graph, config=self._config, ) if self._devices is None: self._devices = all_worker_devices(self._session) with self._graph.as_default(): self._worker_manager = WorkerHeartbeatManager.from_devices( self._session, self._devices) self._worker_manager.configure( event_pb2.WorkerHeartbeatRequest( watchdog_config=event_pb2.WatchdogConfig( timeout_ms=self.shutdown_timeout * 1000,), shutdown_mode=event_pb2.WAIT_FOR_COORDINATOR)) def configure_and_run(self): logging.info( 'Enabling watchdog timer with %d second timeout ' 'and %d second ping interval.', self.shutdown_timeout, self.ping_interval) self._reset_manager() self._running = True self.start() def stop(self): logging.info('Stopping worker watchdog.') self._worker_manager.configure( event_pb2.WorkerHeartbeatRequest( watchdog_config=event_pb2.WatchdogConfig(timeout_ms=-1,), shutdown_mode=event_pb2.NOT_CONFIGURED)) self._running = False self.join() def __enter__(self): self.configure_and_run() def __exit__(self, exc_type, exc_val, exc_tb): self.stop() def run(self): # Don't fetch logs or adjust timing: just ping the watchdog. # # If we hit an exception, reset our session as it is likely broken. while self._running: try: self._worker_manager.ping(request=None) time.sleep(self.ping_interval) except errors.OpError as e: # Catch any TF errors that occur so we don't stop sending heartbeats logging.debug('Caught error while sending heartbeat: %s', e) self._reset_manager() def start_worker_watchdog(session, devices=None, ping_interval=60, shutdown_timeout=3600): """Start global worker watchdog to shutdown workers on coordinator exit.""" global _WATCHDOG if _WATCHDOG is None: # Ensure we can send a few pings before we timeout! ping_interval = min(shutdown_timeout / 10., ping_interval) _WATCHDOG = WatchdogManager(session, devices, ping_interval, shutdown_timeout) _WATCHDOG.configure_and_run() class GracefulShutdownHook(session_run_hook.SessionRunHook): """Session hook that watches for shutdown events. If a shutdown is indicated, `saver.save(checkpoint_prefix)` is executed, and a SystemShutdown exception is raised to terminate the main session. If `saver` is None the `SAVERS` collection will be read to find a saver. `on_shutdown_hooks` is an optional list of functions that should be called after checkpointing. The function is called with (`run_context`, `all_workers`, `lame_workers`). If `heartbeat_group` is not specified, it will default to all CPU workers in the system. """ def __init__(self, checkpoint_prefix, saver=None, on_shutdown_hooks=None): self._saver = saver self._checkpoint_prefix = checkpoint_prefix self._on_shutdown_hooks = on_shutdown_hooks if on_shutdown_hooks else [] # Worker heartbeats are managed independently of the main training graph. self._graph = ops.Graph() self._workers = None self._session = None self._heartbeat_supported = False def after_create_session(self, training_session, coord): # pylint: disable=unused-argument # N.B. 
We have to pull the global step here to avoid it being unavailable # at checkpoint time; the graph has been frozen at that point. if training_util.get_global_step() is None and self.saver() is not None: raise ValueError( 'Saver defined but no global step. Run `get_or_create_global_step()`' ' in your model definition to allow checkpointing.') with self._graph.as_default(): logging.info('Installing graceful shutdown hook.') self._session = _clone_session(training_session, self._graph) self._workers = WorkerHeartbeatManager.from_devices( self._session, all_worker_devices(self._session)) self._heartbeat_supported = self._workers.num_workers() > 0 if self._heartbeat_supported: try: self._workers.configure( event_pb2.WorkerHeartbeatRequest( shutdown_mode=event_pb2.WAIT_FOR_COORDINATOR)) except errors.InvalidArgumentError: logging.warn( 'TPU device does not support heartbeats. Failure ' 'handling will be disabled.') self._heartbeat_supported = False else: logging.warn( 'No workers support hearbeats. Failure handling will be disabled.') def saver(self): if self._saver: return self._saver savers = ops.get_collection(ops.GraphKeys.SAVERS) if not savers: return None if not isinstance(savers, list): return savers if len(savers) > 1: logging.error( 'Multiple savers in the SAVERS collection. On-demand checkpointing ' 'will be disabled. Pass an explicit `saver` to the constructor to ' 'override this behavior.') return None return savers[0] def after_run(self, run_context, run_values): del run_values if not self._heartbeat_supported: return lame_workers = self._workers.lame_workers() if lame_workers: logging.info('ShutdownHook: lame workers found: %s', lame_workers) if self.saver(): logging.info('ShutdownHook: saving checkpoint to %s', self._checkpoint_prefix) self.saver().save( run_context.session, self._checkpoint_prefix, global_step=training_util.get_global_step(), write_state=True, ) else: logging.info('ShutdownHook: no Saver defined.') for fn in self._on_shutdown_hooks: fn(run_context, self._workers, lame_workers) class ResetComputation(object): """Hook to reset a TPUEstimator computation loop. This hook shuts down all workers and resets the monitored session loop by throwing a CoordinatorResetError. """ def __init__(self): pass def __call__(self, run_context, all_workers, lame_workers): del run_context, lame_workers all_workers.shutdown() logging.info('Resetting coordinator.') raise CoordinatorResetError() class ShutdownLameWorkers(object): """Shutdown lamed workers. Processing will continue normally (typically by waiting for the down workers to be restarted). """ def __init__(self): pass def __call__(self, run_context, all_workers, lame_workers): lame_workers.shutdown() class ShutdownAllWorkers(object): """Shutdown all workers. Processing will continue normally (typically by waiting for the down workers to be restarted). """ def __init__(self): pass def __call__(self, run_context, all_workers, lame_workers): all_workers.shutdown()
apache-2.0
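A hedged usage sketch for the watchdog and shutdown helpers defined above; it assumes a TF 1.x-style session already connected to TPU workers, and the worker address and checkpoint path are placeholders:

import tensorflow.compat.v1 as tf
from tensorflow.python.tpu import session_support

session = tf.Session('grpc://my-tpu-worker:8470')

# Ping workers every 60 s; workers self-terminate after 1 h without a ping.
watchdog = session_support.WatchdogManager(session, ping_interval=60,
                                           shutdown_timeout=3600)
with watchdog:
    pass  # session.run(train_op) iterations would go here

# Alternatively, install a hook that checkpoints and resets the computation
# loop when lame workers are detected.
hook = session_support.GracefulShutdownHook(
    checkpoint_prefix='/tmp/model/ckpt',
    on_shutdown_hooks=[session_support.ResetComputation()])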
psyonara/agonizomai
sermons/models.py
1
5153
from __future__ import unicode_literals

from django.db import models
from django.template.defaultfilters import slugify

from bible.models import BibleBook
from useraccounts.models import UserAccount


class Author(models.Model):
    name = models.CharField(null=False, blank=False, max_length=50)
    name_slug = models.SlugField(max_length=50, null=True, blank=True, db_index=True)

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        if self.name_slug is None or self.name_slug == "":
            self.name_slug = slugify(self.name)
        super(Author, self).save(*args, **kwargs)


class AuthorSetting(models.Model):
    """Holds user settings specific to an author."""

    author = models.ForeignKey(Author, on_delete=models.CASCADE)
    user = models.ForeignKey("useraccounts.UserAccount", on_delete=models.CASCADE)
    name = models.CharField(max_length=30, db_index=True)
    value = models.CharField(max_length=50)


class Series(models.Model):
    name = models.CharField(null=False, blank=False, max_length=100)
    name_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
    author = models.ForeignKey(Author, null=False, blank=False, on_delete=models.CASCADE)
    complete = models.BooleanField(default=False)

    def __str__(self):
        return "%s (%s)" % (self.name, self.author.name)

    def save(self, *args, **kwargs):
        if self.name_slug is None or self.name_slug == "":
            self.name_slug = slugify(self.name)
        super(Series, self).save(*args, **kwargs)


class Sermon(models.Model):
    date_added = models.DateTimeField(auto_now_add=True)
    date_preached = models.DateField(null=True, blank=True)
    author = models.ForeignKey(Author, related_name="sermons", on_delete=models.CASCADE)
    title = models.CharField(null=False, blank=False, max_length=100)
    title_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
    series = models.ForeignKey(
        Series, null=True, blank=True, related_name="sermons", on_delete=models.CASCADE
    )
    ref = models.CharField(max_length=20, null=True, blank=True)

    def get_audio_file(self):
        files = self.media_files.filter(media_type=1)
        return files[0] if len(files) > 0 else None

    def __str__(self):
        return "%s (by %s)" % (self.title, self.author.name)

    def save(self, *args, **kwargs):
        if self.title_slug is None or self.title_slug == "":
            self.title_slug = slugify(self.title)
        super(Sermon, self).save(*args, **kwargs)

    class Meta:
        ordering = ["-date_preached"]


class ScriptureRef(models.Model):
    sermon = models.ForeignKey(Sermon, related_name="scripture_refs", on_delete=models.CASCADE)
    bible_book = models.ForeignKey(BibleBook, on_delete=models.CASCADE)
    chapter_begin = models.PositiveSmallIntegerField()
    chapter_end = models.PositiveSmallIntegerField()
    verse_begin = models.PositiveSmallIntegerField(null=True, blank=True)
    verse_end = models.PositiveSmallIntegerField(null=True, blank=True)

    def __str__(self):
        end_string = ""
        if self.chapter_begin == self.chapter_end:
            end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
            if self.verse_begin is not None and self.verse_end is not None:
                if self.verse_begin == self.verse_end:
                    end_string += ":%s" % (self.verse_begin)
                else:
                    end_string += ":%s-%s" % (self.verse_begin, self.verse_end)
        else:
            end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
            if self.verse_begin is None and self.verse_end is None:
                end_string += "-%s" % (self.chapter_end)
            else:
                end_string += ":%s-%s:%s" % (self.verse_begin, self.chapter_end, self.verse_end)
        return end_string


class MediaFile(models.Model):
    MEDIA_TYPE_CHOICES = ((1, "audio"), (2, "video"), (3, "text"), (4, "pdf"))
    LOCATION_TYPE_CHOICES = ((1, "url"),)

    sermon = models.ForeignKey(Sermon, related_name="media_files", on_delete=models.CASCADE)
    media_type = models.PositiveSmallIntegerField(choices=MEDIA_TYPE_CHOICES, null=False, default=1)
    file_size = models.PositiveIntegerField(null=True, blank=True)
    location_type = models.PositiveSmallIntegerField(
        choices=LOCATION_TYPE_CHOICES, null=False, default=1
    )
    location = models.CharField(null=False, max_length=250)

    def __str__(self):
        return "%s (%s)" % (self.location, self.sermon.title)


class SermonSession(models.Model):
    sermon = models.ForeignKey(Sermon, related_name="sessions", on_delete=models.CASCADE)
    session_started = models.DateTimeField(auto_now_add=True)
    session_updated = models.DateTimeField(auto_now=True)
    position = models.PositiveSmallIntegerField(default=0)  # in seconds from start of file
    total_duration = models.PositiveSmallIntegerField(default=0)  # in seconds
    user = models.ForeignKey(UserAccount, on_delete=models.CASCADE)
    completed = models.BooleanField(default=False)
mit
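The models above generate slugs in save() and look up attached audio via media_type; a brief usage sketch to make that concrete (it assumes the project's Django settings and migrations are in place, and all object values are invented):

from sermons.models import Author, Series, Sermon

author = Author.objects.create(name="John Owen")              # name_slug -> "john-owen"
series = Series.objects.create(name="Mortification of Sin", author=author)
sermon = Sermon.objects.create(
    title="Of the Mortification of Sin in Believers",
    author=author,
    series=series,
)

print(sermon.title_slug)        # slugified automatically in save()
print(sermon.get_audio_file())  # None until a MediaFile with media_type=1 is attached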
funshine/rpidemo
mqtt_oled/oled_test_luma.py
1
1273
#!/usr/bin/python
# coding: utf-8

import time
import datetime

from luma.core.interface.serial import i2c, spi
from luma.core.render import canvas
from luma.oled.device import ssd1306, ssd1325, ssd1331, sh1106


def do_nothing(obj):
    pass

# rev.1 users set port=0
# substitute spi(device=0, port=0) below if using that interface
# serial = i2c(port=1, address=0x3C)
serial = spi(device=0, port=0)

# substitute ssd1331(...) or sh1106(...) below if using that device
# device = ssd1306(serial, rotate=1)
device = sh1106(serial)
# device.cleanup = do_nothing

print("Testing display Hello World")
with canvas(device) as draw:
    draw.rectangle(device.bounding_box, outline="white", fill="black")
    draw.text((30, 40), "Hello World", fill="white")
time.sleep(3)

print("Testing display ON/OFF...")
for _ in range(5):
    time.sleep(0.5)
    device.hide()
    time.sleep(0.5)
    device.show()

print("Testing clear display...")
time.sleep(2)
device.clear()

print("Testing screen updates...")
time.sleep(2)
for x in range(40):
    with canvas(device) as draw:
        now = datetime.datetime.now()
        draw.text((x, 4), str(now.date()), fill="white")
        draw.text((10, 16), str(now.time()), fill="white")
    time.sleep(0.1)

print("Quit, cleanup...")
mit
VikParuchuri/evolve-music
midi/sequencer_alsa/sequencer.py
5
16997
import select import sequencer_alsa as S import midi __SWIG_NS_SET__ = set(['__class__', '__del__', '__delattr__', '__dict__', '__doc__', '__getattr__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__str__', '__swig_getmethods__', '__swig_setmethods__', '__weakref__', 'this', 'thisown']) def stringify(name, obj, indent=0): retstr = '' datafields = False if getattr(obj, 'this', False): datafields = dir(obj) # filter unwanted names datafields = list(set(datafields) - __SWIG_NS_SET__) retstr += '%s%s ::\n' % (' ' * indent, name) for key in datafields: value = getattr(obj, key, "n/a") retstr += stringify(key, value, indent+1) else: retstr += '%s%s: %s\n' % (' ' * indent, name, obj) return retstr class Sequencer(object): __ARGUMENTS__ = { 'alsa_sequencer_name':'__sequencer__', 'alsa_sequencer_stream':S.SND_SEQ_OPEN_DUPLEX, 'alsa_sequencer_mode':S.SND_SEQ_NONBLOCK, 'alsa_sequencer_type':'default', 'alsa_port_name':'__port__', 'alsa_port_caps':S.SND_SEQ_PORT_CAP_READ, 'alsa_port_type':S.SND_SEQ_PORT_TYPE_MIDI_GENERIC, 'alsa_queue_name':'__queue__', 'sequencer_tempo':120, 'sequencer_resolution':1000, } DefaultArguments = {} def __init__(self, **ns): # seed with baseline arguments self.__dict__.update(self.__ARGUMENTS__) # update with default arguments from concrete class self.__dict__.update(self.DefaultArguments) # apply user arguments self.__dict__.update(ns) self.client = None self._queue_running = False self._poll_descriptors = [] self.init() def __del__(self): if self.client: S.snd_seq_close(self.client) def init(self): self._init_handle() self._init_port() self._init_queue() def set_nonblock(self, nonblock=True): if nonblock: self.alsa_sequencer_mode = S.SND_SEQ_NONBLOCK else: self.alsa_sequencer_mode = 0 S.snd_seq_nonblock(self.client, self.alsa_sequencer_mode) def get_nonblock(self): if self.alsa_sequencer_mode == S.SND_SEQ_NONBLOCK: return True else: return False def _error(self, errcode): strerr = S.snd_strerror(errcode) msg = "ALSAError[%d]: %s" % (errcode, strerr) raise RuntimeError, msg def _init_handle(self): ret = S.open_client(self.alsa_sequencer_name, self.alsa_sequencer_type, self.alsa_sequencer_stream, self.alsa_sequencer_mode) if ret == None: # XXX: global error self._error(ret) self.client = ret self.client_id = S.snd_seq_client_id(self.client) self.output_buffer_size = S.snd_seq_get_output_buffer_size(self.client) self.input_buffer_size = S.snd_seq_get_input_buffer_size(self.client) self._set_poll_descriptors() def _init_port(self): err = S.snd_seq_create_simple_port(self.client, self.alsa_port_name, self.alsa_port_caps, self.alsa_port_type) if err < 0: self._error(err) self.port = err def _new_subscribe(self, sender, dest, read=True): subscribe = S.new_port_subscribe() if read: self.read_sender = sender self.read_dest = dest S.snd_seq_port_subscribe_set_sender(subscribe, self.read_sender) S.snd_seq_port_subscribe_set_dest(subscribe, self.read_dest) else: self.write_sender = sender self.write_dest = dest S.snd_seq_port_subscribe_set_sender(subscribe, self.write_sender) S.snd_seq_port_subscribe_set_dest(subscribe, self.write_dest) S.snd_seq_port_subscribe_set_queue(subscribe, self.queue) return subscribe def _subscribe_port(self, subscribe): err = S.snd_seq_subscribe_port(self.client, subscribe) if err < 0: self._error(err) def _my_address(self): addr = S.snd_seq_addr_t() addr.client = self.client_id addr.port = self.port return addr def _new_address(self, client, port): addr = 
S.snd_seq_addr_t() addr.client = int(client) addr.port = int(port) return addr def _init_queue(self): err = S.snd_seq_alloc_named_queue(self.client, self.alsa_queue_name) if err < 0: self._error(err) self.queue = err adjtempo = int(60.0 * 1000000.0 / self.sequencer_tempo) S.init_queue_tempo(self.client, self.queue, adjtempo, self.sequencer_resolution) def _control_queue(self, ctype, cvalue, event=None): err = S.snd_seq_control_queue(self.client, self.queue, ctype, cvalue, event) if err < 0: self._error(err) self.drain() def _set_event_broadcast(self, event): event.source.client = source.client event.source.port = source.port event.dest.client = S.SND_SEQ_ADDRESS_SUBSCRIBERS event.dest.port = S.SND_SEQ_ADDRESS_UNKNOWN def queue_get_tick_time(self): status = S.new_queue_status(self.client, self.queue) S.snd_seq_get_queue_status(self.client, self.queue, status) res = S.snd_seq_queue_status_get_tick_time(status) S.free_queue_status(status) return res def queue_get_real_time(self): status = S.new_queue_status(self.client, self.queue) S.snd_seq_get_queue_status(self.client, self.queue, status) res = S.snd_seq_queue_status_get_real_time(status) S.free_queue_status(status) return (res.tv_sec, res.tv_nsec) def change_tempo(self, tempo, event=None): adjbpm = int(60.0 * 1000000.0 / tempo) self._control_queue(S.SND_SEQ_EVENT_TEMPO, adjbpm, event) self.sequencer_tempo = tempo return True def start_sequencer(self, event=None): if not self._queue_running: self._control_queue(S.SND_SEQ_EVENT_START, 0, event) self._queue_running = True def continue_sequencer(self, event=None): if not self._queue_running: self._control_queue(S.SND_SEQ_EVENT_CONTINUE, 0, event) self._queue_running = True def stop_sequencer(self, event=None): if self._queue_running: self._control_queue(S.SND_SEQ_EVENT_STOP, 0, event) self._queue_running = False def drain(self): S.snd_seq_drain_output(self.client) def queue_eventlen(self): status = S.new_queue_status(self.client, self.queue) S.snd_seq_queue_status_get_events(status) def _set_poll_descriptors(self): self._poll_descriptors = S.client_poll_descriptors(self.client) def configure_poll(self, poll): for fd in self._poll_descriptors: poll.register(fd, select.POLLIN) def drop_output(self): S.snd_seq_drop_output_buffer(self.client) def output_pending(self): return S.snd_seq_event_output_pending(self.client) ## EVENT HANDLERS ## def event_write(self, event, direct=False, relative=False, tick=False): #print event.__class__, event ## Event Filter if isinstance(event, midi.EndOfTrackEvent): return seqev = S.snd_seq_event_t() ## common seqev.dest.client = self.write_dest.client seqev.dest.port = self.write_dest.port seqev.source.client = self.write_sender.client seqev.source.port = self.write_sender.port if direct: # no scheduling seqev.queue = S.SND_SEQ_QUEUE_DIRECT else: seqev.queue = self.queue seqev.flags &= (S.SND_SEQ_TIME_STAMP_MASK|S.SND_SEQ_TIME_MODE_MASK) if relative: seqev.flags |= S.SND_SEQ_TIME_MODE_REL else: seqev.flags |= S.SND_SEQ_TIME_MODE_ABS if tick: seqev.flags |= S.SND_SEQ_TIME_STAMP_TICK seqev.time.tick = event.tick else: seqev.flags |= S.SND_SEQ_TIME_STAMP_REAL sec = int(event.msdelay / 1000) nsec = int((event.msdelay - (sec * 1000)) * 1000000) seqev.time.time.tv_sec = sec seqev.time.time.tv_nsec = nsec ## Tempo Change if isinstance(event, midi.SetTempoEvent): adjtempo = int(60.0 * 1000000.0 / event.bpm) seqev.type = S.SND_SEQ_EVENT_TEMPO seqev.dest.client = S.SND_SEQ_CLIENT_SYSTEM seqev.dest.port = S.SND_SEQ_PORT_SYSTEM_TIMER seqev.data.queue.queue = self.queue 
seqev.data.queue.param.value = adjtempo ## Note Event elif isinstance(event, midi.NoteEvent): if isinstance(event, midi.NoteOnEvent): seqev.type = S.SND_SEQ_EVENT_NOTEON if isinstance(event, midi.NoteOffEvent): seqev.type = S.SND_SEQ_EVENT_NOTEOFF seqev.data.note.channel = event.channel seqev.data.note.note = event.pitch seqev.data.note.velocity = event.velocity ## Control Change elif isinstance(event, midi.ControlChangeEvent): seqev.type = S.SND_SEQ_EVENT_CONTROLLER seqev.data.control.channel = event.channel seqev.data.control.param = event.control seqev.data.control.value = event.value ## Program Change elif isinstance(event, midi.ProgramChangeEvent): seqev.type = S.SND_SEQ_EVENT_PGMCHANGE seqev.data.control.channel = event.channel seqev.data.control.value = event.value ## Pitch Bench elif isinstance(event, midi.PitchWheelEvent): seqev.type = S.SND_SEQ_EVENT_PITCHBEND seqev.data.control.channel = event.channel seqev.data.control.value = event.pitch ## Unknown else: print "Warning :: Unknown event type: %s" % event return None err = S.snd_seq_event_output(self.client, seqev) if (err < 0): self._error(err) self.drain() return self.output_buffer_size - err def event_read(self): ev = S.event_input(self.client) if ev and (ev < 0): self._error(ev) if ev and ev.type in (S.SND_SEQ_EVENT_NOTEON, S.SND_SEQ_EVENT_NOTEOFF): if ev.type == S.SND_SEQ_EVENT_NOTEON: mev = midi.NoteOnEvent() mev.channel = ev.data.note.channel mev.pitch = ev.data.note.note mev.velocity = ev.data.note.velocity elif ev.type == S.SND_SEQ_EVENT_NOTEOFF: mev = midi.NoteOffEvent() mev.channel = ev.data.note.channel mev.pitch = ev.data.note.note mev.velocity = ev.data.note.velocity if ev.time.time.tv_nsec: # convert to ms mev.msdeay = \ (ev.time.time.tv_nsec / 1e6) + (ev.time.time.tv_sec * 1e3) else: mev.tick = ev.time.tick return mev else: return None class SequencerHardware(Sequencer): SequencerName = "__hw__" SequencerStream = S.SND_SEQ_OPEN_DUPLEX SequencerType = "hw" SequencerMode = 0 class Client(object): def __init__(self, client, name): self.client = client self.name = name self._ports = {} def __str__(self): retstr = '] client(%d) "%s"\n' % (self.client, self.name) for port in self: retstr += str(port) return retstr def add_port(self, port, name, caps): port = self.Port(port, name, caps) self._ports[name] = port def __iter__(self): return self._ports.itervalues() def __len__(self): return len(self._ports) def get_port(self, key): return self._ports[key] __getitem__ = get_port class Port(object): def __init__(self, port, name, caps): self.port = port self.name = name self.caps = caps self.caps_read = self.caps & S.SND_SEQ_PORT_CAP_READ self.caps_write = self.caps & S.SND_SEQ_PORT_CAP_WRITE self.caps_subs_read = self.caps & S.SND_SEQ_PORT_CAP_SUBS_READ self.caps_subs_write = self.caps & S.SND_SEQ_PORT_CAP_SUBS_WRITE def __str__(self): flags = [] if self.caps_read: flags.append('r') if self.caps_write: flags.append('w') if self.caps_subs_read: flags.append('sender') if self.caps_subs_write: flags.append('receiver') flags = str.join(', ', flags) retstr = '] port(%d) [%s] "%s"\n' % (self.port, flags, self.name) return retstr def init(self): self._clients = {} self._init_handle() self._query_clients() def __iter__(self): return self._clients.itervalues() def __len__(self): return len(self._clients) def get_client(self, key): return self._clients[key] __getitem__ = get_client def get_client_and_port(self, cname, pname): client = self[cname] port = client[pname] return (client.client, port.port) def __str__(self): retstr = '' 
for client in self: retstr += str(client) return retstr def _query_clients(self): self._clients = {} S.snd_seq_drop_output(self.client) cinfo = S.new_client_info() pinfo = S.new_port_info() S.snd_seq_client_info_set_client(cinfo, -1) # for each client while S.snd_seq_query_next_client(self.client, cinfo) >= 0: client = S.snd_seq_client_info_get_client(cinfo) cname = S.snd_seq_client_info_get_name(cinfo) cobj = self.Client(client, cname) self._clients[cname] = cobj # get port data S.snd_seq_port_info_set_client(pinfo, client) S.snd_seq_port_info_set_port(pinfo, -1) while (S.snd_seq_query_next_port(self.client, pinfo) >= 0): cap = S.snd_seq_port_info_get_capability(pinfo) client = S.snd_seq_port_info_get_client(pinfo) port = S.snd_seq_port_info_get_port(pinfo) pname = S.snd_seq_port_info_get_name(pinfo) cobj.add_port(port, pname, cap) class SequencerRead(Sequencer): DefaultArguments = { 'sequencer_name':'__SequencerRead__', 'sequencer_stream':not S.SND_SEQ_NONBLOCK, 'alsa_port_caps':S.SND_SEQ_PORT_CAP_WRITE | S.SND_SEQ_PORT_CAP_SUBS_WRITE, } def subscribe_port(self, client, port): sender = self._new_address(client, port) dest = self._my_address() subscribe = self._new_subscribe(sender, dest, read=True) S.snd_seq_port_subscribe_set_time_update(subscribe, True) #S.snd_seq_port_subscribe_set_time_real(subscribe, True) self._subscribe_port(subscribe) class SequencerWrite(Sequencer): DefaultArguments = { 'sequencer_name':'__SequencerWrite__', 'sequencer_stream':not S.SND_SEQ_NONBLOCK, 'alsa_port_caps':S.SND_SEQ_PORT_CAP_READ | S.SND_SEQ_PORT_CAP_SUBS_READ } def subscribe_port(self, client, port): sender = self._my_address() dest = self._new_address(client, port) subscribe = self._new_subscribe(sender, dest, read=False) self._subscribe_port(subscribe) class SequencerDuplex(Sequencer): DefaultArguments = { 'sequencer_name':'__SequencerWrite__', 'sequencer_stream':not S.SND_SEQ_NONBLOCK, 'alsa_port_caps':S.SND_SEQ_PORT_CAP_READ | S.SND_SEQ_PORT_CAP_SUBS_READ | S.SND_SEQ_PORT_CAP_WRITE | S.SND_SEQ_PORT_CAP_SUBS_WRITE } def subscribe_read_port(self, client, port): sender = self._new_address(client, port) dest = self._my_address() subscribe = self._new_subscribe(sender, dest, read=True) S.snd_seq_port_subscribe_set_time_update(subscribe, True) #S.snd_seq_port_subscribe_set_time_real(subscribe, True) self._subscribe_port(subscribe) def subscribe_write_port(self, client, port): sender = self._my_address() dest = self._new_address(client, port) subscribe = self._new_subscribe(sender, dest, read=False) self._subscribe_port(subscribe)
agpl-3.0
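A hedged sketch of driving the wrapper above from client code; it assumes ALSA plus the compiled sequencer_alsa SWIG module are available, and the import path, client and port numbers are illustrative guesses rather than project-documented values:

import midi
from midi.sequencer_alsa.sequencer import SequencerHardware, SequencerWrite

hw = SequencerHardware()
print(hw)                    # dumps "] client(n) ..." lines for every ALSA client/port

seq = SequencerWrite(sequencer_tempo=120)
seq.subscribe_port(14, 0)    # e.g. the "Midi Through" port; adjust for your system
seq.start_sequencer()

note = midi.NoteOnEvent()
note.channel = 0
note.pitch = 60
note.velocity = 100
note.tick = 0
seq.event_write(note, tick=True)   # scheduled on the queue using tick time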
zhjunlang/kbengine
kbe/src/lib/python/Lib/encodings/iso2022_jp_3.py
816
1061
#
# iso2022_jp_3.py: Python Unicode Codec for ISO2022_JP_3
#
# Written by Hye-Shik Chang <[email protected]>
#

import _codecs_iso2022, codecs
import _multibytecodec as mbc

codec = _codecs_iso2022.getcodec('iso2022_jp_3')

class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode

class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec

class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec

class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec

class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec

def getregentry():
    return codecs.CodecInfo(
        name='iso2022_jp_3',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
lgpl-3.0
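Since the module registers a standard multibyte codec, the same encoding is reachable through the codecs registry on a stock CPython build; a small round-trip example:

import codecs

info = codecs.lookup('iso2022_jp_3')
data = 'こんにちは'.encode('iso2022_jp_3')
print(data)                          # escape-sequence (ISO-2022) encoded bytes
print(data.decode('iso2022_jp_3'))   # round-trips to the original string

# Incremental use mirrors the IncrementalEncoder/IncrementalDecoder classes above.
enc = info.incrementalencoder()
chunks = enc.encode('こん') + enc.encode('にちは', final=True)
print(chunks.decode('iso2022_jp_3'))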
sahiljain/catapult
third_party/gsutil/third_party/boto/boto/logs/layer1.py
146
22588
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # import boto from boto.connection import AWSQueryConnection from boto.regioninfo import RegionInfo from boto.exception import JSONResponseError from boto.logs import exceptions from boto.compat import json class CloudWatchLogsConnection(AWSQueryConnection): """ Amazon CloudWatch Logs Service API Reference This is the Amazon CloudWatch Logs API Reference . Amazon CloudWatch Logs is a managed service for real time monitoring and archival of application logs. This guide provides detailed information about Amazon CloudWatch Logs actions, data types, parameters, and errors. For detailed information about Amazon CloudWatch Logs features and their associated API calls, go to the `Amazon CloudWatch Logs Developer Guide`_. Use the following links to get started using the Amazon CloudWatch API Reference : + `Actions`_: An alphabetical list of all Amazon CloudWatch Logs actions. + `Data Types`_: An alphabetical list of all Amazon CloudWatch Logs data types. + `Common Parameters`_: Parameters that all Query actions can use. + `Common Errors`_: Client and server errors that all actions can return. + `Regions and Endpoints`_: Itemized regions and endpoints for all AWS products. In addition to using the Amazon CloudWatch Logs API, you can also use the following SDKs and third-party libraries to access Amazon CloudWatch Logs programmatically. 
+ `AWS SDK for Java Documentation`_ + `AWS SDK for .NET Documentation`_ + `AWS SDK for PHP Documentation`_ + `AWS SDK for Ruby Documentation`_ Developers in the AWS developer community also provide their own libraries, which you can find at the following AWS developer centers: + `AWS Java Developer Center`_ + `AWS PHP Developer Center`_ + `AWS Python Developer Center`_ + `AWS Ruby Developer Center`_ + `AWS Windows and .NET Developer Center`_ """ APIVersion = "2014-03-28" DefaultRegionName = "us-east-1" DefaultRegionEndpoint = "logs.us-east-1.amazonaws.com" ServiceName = "CloudWatchLogs" TargetPrefix = "Logs_20140328" ResponseError = JSONResponseError _faults = { "LimitExceededException": exceptions.LimitExceededException, "DataAlreadyAcceptedException": exceptions.DataAlreadyAcceptedException, "ResourceInUseException": exceptions.ResourceInUseException, "ServiceUnavailableException": exceptions.ServiceUnavailableException, "InvalidParameterException": exceptions.InvalidParameterException, "ResourceNotFoundException": exceptions.ResourceNotFoundException, "ResourceAlreadyExistsException": exceptions.ResourceAlreadyExistsException, "OperationAbortedException": exceptions.OperationAbortedException, "InvalidSequenceTokenException": exceptions.InvalidSequenceTokenException, } def __init__(self, **kwargs): region = kwargs.pop('region', None) if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) if 'host' not in kwargs or kwargs['host'] is None: kwargs['host'] = region.endpoint super(CloudWatchLogsConnection, self).__init__(**kwargs) self.region = region def _required_auth_capability(self): return ['hmac-v4'] def create_log_group(self, log_group_name): """ Creates a new log group with the specified name. The name of the log group must be unique within a region for an AWS account. You can create up to 100 log groups per account. You must use the following guidelines when naming a log group: + Log group names can be between 1 and 512 characters long. + Allowed characters are az, AZ, 09, '_' (underscore), '-' (hyphen), '/' (forward slash), and '.' (period). Log groups are created with a default retention of 14 days. The retention attribute allow you to configure the number of days you want to retain log events in the specified log group. See the `SetRetention` operation on how to modify the retention of your log groups. :type log_group_name: string :param log_group_name: """ params = {'logGroupName': log_group_name, } return self.make_request(action='CreateLogGroup', body=json.dumps(params)) def create_log_stream(self, log_group_name, log_stream_name): """ Creates a new log stream in the specified log group. The name of the log stream must be unique within the log group. There is no limit on the number of log streams that can exist in a log group. You must use the following guidelines when naming a log stream: + Log stream names can be between 1 and 512 characters long. + The ':' colon character is not allowed. :type log_group_name: string :param log_group_name: :type log_stream_name: string :param log_stream_name: """ params = { 'logGroupName': log_group_name, 'logStreamName': log_stream_name, } return self.make_request(action='CreateLogStream', body=json.dumps(params)) def delete_log_group(self, log_group_name): """ Deletes the log group with the specified name. Amazon CloudWatch Logs will delete a log group only if there are no log streams and no metric filters associated with the log group. 
If this condition is not satisfied, the request will fail and the log group will not be deleted. :type log_group_name: string :param log_group_name: """ params = {'logGroupName': log_group_name, } return self.make_request(action='DeleteLogGroup', body=json.dumps(params)) def delete_log_stream(self, log_group_name, log_stream_name): """ Deletes a log stream and permanently deletes all the archived log events associated with it. :type log_group_name: string :param log_group_name: :type log_stream_name: string :param log_stream_name: """ params = { 'logGroupName': log_group_name, 'logStreamName': log_stream_name, } return self.make_request(action='DeleteLogStream', body=json.dumps(params)) def delete_metric_filter(self, log_group_name, filter_name): """ Deletes a metric filter associated with the specified log group. :type log_group_name: string :param log_group_name: :type filter_name: string :param filter_name: The name of the metric filter. """ params = { 'logGroupName': log_group_name, 'filterName': filter_name, } return self.make_request(action='DeleteMetricFilter', body=json.dumps(params)) def delete_retention_policy(self, log_group_name): """ :type log_group_name: string :param log_group_name: """ params = {'logGroupName': log_group_name, } return self.make_request(action='DeleteRetentionPolicy', body=json.dumps(params)) def describe_log_groups(self, log_group_name_prefix=None, next_token=None, limit=None): """ Returns all the log groups that are associated with the AWS account making the request. The list returned in the response is ASCII-sorted by log group name. By default, this operation returns up to 50 log groups. If there are more log groups to list, the response would contain a `nextToken` value in the response body. You can also limit the number of log groups returned in the response by specifying the `limit` parameter in the request. :type log_group_name_prefix: string :param log_group_name_prefix: :type next_token: string :param next_token: A string token used for pagination that points to the next page of results. It must be a value obtained from the response of the previous `DescribeLogGroups` request. :type limit: integer :param limit: The maximum number of items returned in the response. If you don't specify a value, the request would return up to 50 items. """ params = {} if log_group_name_prefix is not None: params['logGroupNamePrefix'] = log_group_name_prefix if next_token is not None: params['nextToken'] = next_token if limit is not None: params['limit'] = limit return self.make_request(action='DescribeLogGroups', body=json.dumps(params)) def describe_log_streams(self, log_group_name, log_stream_name_prefix=None, next_token=None, limit=None): """ Returns all the log streams that are associated with the specified log group. The list returned in the response is ASCII-sorted by log stream name. By default, this operation returns up to 50 log streams. If there are more log streams to list, the response would contain a `nextToken` value in the response body. You can also limit the number of log streams returned in the response by specifying the `limit` parameter in the request. :type log_group_name: string :param log_group_name: :type log_stream_name_prefix: string :param log_stream_name_prefix: :type next_token: string :param next_token: A string token used for pagination that points to the next page of results. It must be a value obtained from the response of the previous `DescribeLogStreams` request. 
:type limit: integer :param limit: The maximum number of items returned in the response. If you don't specify a value, the request would return up to 50 items. """ params = {'logGroupName': log_group_name, } if log_stream_name_prefix is not None: params['logStreamNamePrefix'] = log_stream_name_prefix if next_token is not None: params['nextToken'] = next_token if limit is not None: params['limit'] = limit return self.make_request(action='DescribeLogStreams', body=json.dumps(params)) def describe_metric_filters(self, log_group_name, filter_name_prefix=None, next_token=None, limit=None): """ Returns all the metrics filters associated with the specified log group. The list returned in the response is ASCII-sorted by filter name. By default, this operation returns up to 50 metric filters. If there are more metric filters to list, the response would contain a `nextToken` value in the response body. You can also limit the number of metric filters returned in the response by specifying the `limit` parameter in the request. :type log_group_name: string :param log_group_name: :type filter_name_prefix: string :param filter_name_prefix: The name of the metric filter. :type next_token: string :param next_token: A string token used for pagination that points to the next page of results. It must be a value obtained from the response of the previous `DescribeMetricFilters` request. :type limit: integer :param limit: The maximum number of items returned in the response. If you don't specify a value, the request would return up to 50 items. """ params = {'logGroupName': log_group_name, } if filter_name_prefix is not None: params['filterNamePrefix'] = filter_name_prefix if next_token is not None: params['nextToken'] = next_token if limit is not None: params['limit'] = limit return self.make_request(action='DescribeMetricFilters', body=json.dumps(params)) def get_log_events(self, log_group_name, log_stream_name, start_time=None, end_time=None, next_token=None, limit=None, start_from_head=None): """ Retrieves log events from the specified log stream. You can provide an optional time range to filter the results on the event `timestamp`. By default, this operation returns as much log events as can fit in a response size of 1MB, up to 10,000 log events. The response will always include a `nextForwardToken` and a `nextBackwardToken` in the response body. You can use any of these tokens in subsequent `GetLogEvents` requests to paginate through events in either forward or backward direction. You can also limit the number of log events returned in the response by specifying the `limit` parameter in the request. :type log_group_name: string :param log_group_name: :type log_stream_name: string :param log_stream_name: :type start_time: long :param start_time: A point in time expressed as the number milliseconds since Jan 1, 1970 00:00:00 UTC. :type end_time: long :param end_time: A point in time expressed as the number milliseconds since Jan 1, 1970 00:00:00 UTC. :type next_token: string :param next_token: A string token used for pagination that points to the next page of results. It must be a value obtained from the `nextForwardToken` or `nextBackwardToken` fields in the response of the previous `GetLogEvents` request. :type limit: integer :param limit: The maximum number of log events returned in the response. If you don't specify a value, the request would return as much log events as can fit in a response size of 1MB, up to 10,000 log events. 
:type start_from_head: boolean :param start_from_head: """ params = { 'logGroupName': log_group_name, 'logStreamName': log_stream_name, } if start_time is not None: params['startTime'] = start_time if end_time is not None: params['endTime'] = end_time if next_token is not None: params['nextToken'] = next_token if limit is not None: params['limit'] = limit if start_from_head is not None: params['startFromHead'] = start_from_head return self.make_request(action='GetLogEvents', body=json.dumps(params)) def put_log_events(self, log_group_name, log_stream_name, log_events, sequence_token=None): """ Uploads a batch of log events to the specified log stream. Every PutLogEvents request must include the `sequenceToken` obtained from the response of the previous request. An upload in a newly created log stream does not require a `sequenceToken`. The batch of events must satisfy the following constraints: + The maximum batch size is 32,768 bytes, and this size is calculated as the sum of all event messages in UTF-8, plus 26 bytes for each log event. + None of the log events in the batch can be more than 2 hours in the future. + None of the log events in the batch can be older than 14 days or the retention period of the log group. + The log events in the batch must be in chronological ordered by their `timestamp`. + The maximum number of log events in a batch is 1,000. :type log_group_name: string :param log_group_name: :type log_stream_name: string :param log_stream_name: :type log_events: list :param log_events: A list of events belonging to a log stream. :type sequence_token: string :param sequence_token: A string token that must be obtained from the response of the previous `PutLogEvents` request. """ params = { 'logGroupName': log_group_name, 'logStreamName': log_stream_name, 'logEvents': log_events, } if sequence_token is not None: params['sequenceToken'] = sequence_token return self.make_request(action='PutLogEvents', body=json.dumps(params)) def put_metric_filter(self, log_group_name, filter_name, filter_pattern, metric_transformations): """ Creates or updates a metric filter and associates it with the specified log group. Metric filters allow you to configure rules to extract metric data from log events ingested through `PutLogEvents` requests. :type log_group_name: string :param log_group_name: :type filter_name: string :param filter_name: The name of the metric filter. :type filter_pattern: string :param filter_pattern: :type metric_transformations: list :param metric_transformations: """ params = { 'logGroupName': log_group_name, 'filterName': filter_name, 'filterPattern': filter_pattern, 'metricTransformations': metric_transformations, } return self.make_request(action='PutMetricFilter', body=json.dumps(params)) def put_retention_policy(self, log_group_name, retention_in_days): """ :type log_group_name: string :param log_group_name: :type retention_in_days: integer :param retention_in_days: Specifies the number of days you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730. """ params = { 'logGroupName': log_group_name, 'retentionInDays': retention_in_days, } return self.make_request(action='PutRetentionPolicy', body=json.dumps(params)) def set_retention(self, log_group_name, retention_in_days): """ Sets the retention of the specified log group. Log groups are created with a default retention of 14 days. 
The retention attribute allow you to configure the number of days you want to retain log events in the specified log group. :type log_group_name: string :param log_group_name: :type retention_in_days: integer :param retention_in_days: Specifies the number of days you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730. """ params = { 'logGroupName': log_group_name, 'retentionInDays': retention_in_days, } return self.make_request(action='SetRetention', body=json.dumps(params)) def test_metric_filter(self, filter_pattern, log_event_messages): """ Tests the filter pattern of a metric filter against a sample of log event messages. You can use this operation to validate the correctness of a metric filter pattern. :type filter_pattern: string :param filter_pattern: :type log_event_messages: list :param log_event_messages: """ params = { 'filterPattern': filter_pattern, 'logEventMessages': log_event_messages, } return self.make_request(action='TestMetricFilter', body=json.dumps(params)) def make_request(self, action, body): headers = { 'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action), 'Host': self.region.endpoint, 'Content-Type': 'application/x-amz-json-1.1', 'Content-Length': str(len(body)), } http_request = self.build_base_http_request( method='POST', path='/', auth_path='/', params={}, headers=headers, data=body) response = self._mexe(http_request, sender=None, override_num_retries=10) response_body = response.read().decode('utf-8') boto.log.debug(response_body) if response.status == 200: if response_body: return json.loads(response_body) else: json_body = json.loads(response_body) fault_name = json_body.get('__type', None) exception_class = self._faults.get(fault_name, self.ResponseError) raise exception_class(response.status, response.reason, body=json_body)
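A minimal usage sketch of the connection methods above (not part of the original file): `conn` is assumed to be an already-constructed instance of the connection class this file defines, with valid AWS credentials; the group and stream names are hypothetical.

import time

log_group = 'my-app'
log_stream = 'web-01'

# The first upload to a newly created stream needs no sequence token;
# every later call must pass the token returned by the previous response.
events = [{'timestamp': int(time.time() * 1000), 'message': 'hello'}]
result = conn.put_log_events(log_group, log_stream, events)
more = [{'timestamp': int(time.time() * 1000), 'message': 'world'}]
conn.put_log_events(log_group, log_stream, more,
                    sequence_token=result.get('nextSequenceToken'))

# Page through log groups ten at a time using the nextToken field.
page = conn.describe_log_groups(limit=10)
while True:
    for group in page.get('logGroups', []):
        print(group['logGroupName'])
    if not page.get('nextToken'):
        break
    page = conn.describe_log_groups(limit=10, next_token=page['nextToken'])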
bsd-3-clause
nathandunn/jbrowse
tests/selenium_tests/jbrowse_selenium/JBrowseTest.py
2
12948
import os import time import re import unittest from selenium import webdriver from selenium.webdriver import ActionChains from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.support.ui import Select from selenium.webdriver.support.wait import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import track_selectors class JBrowseTest (object): data_dir = None base_url = None # this "time dilation" factor hugely increases all our fixed waiting times when # running under Travis CI, because the Travis environment is much slower than a desktop. time_dilation = 12 if os.getenv('CI','false') == 'true' else 1 tracksel_type = 'Hierarchical' ## TestCase override - use instead of constructor def setUp( self ): self.track_selector = getattr( track_selectors, '%sTrackSelector' % self.tracksel_type )( self, self.time_dilation ) #self.browser = self._getChrome() self.browser = self._getBrowser() if self.base_url and self.data_dir: self.browser.get(self.base_url+self.data_dir) else: base = self.baseURL() self.browser.get( base + ( '&' if base.find('?') >= 0 else '?' ) + ( "data="+self.data_dir if self.data_dir else "" ) ) if not os.getenv('DEBUG'): self.addCleanup(self.browser.quit) self._waits_for_load() def _getBrowser( self ): browser = os.getenv('SELENIUM_BROWSER','Firefox').lower() if browser == 'firefox': fp = webdriver.FirefoxProfile() fp.set_preference("browser.download.folderList",2) fp.set_preference("browser.download.manager.showWhenStarting",False) fp.set_preference("browser.download.dir", os.getcwd()) fp.set_preference("browser.helperApps.neverAsk.saveToDisk","application/x-bedgraph,application/x-wiggle,application/x-bed") fp.set_preference('webdriver.log.driver.ignore',True) return webdriver.Firefox( firefox_profile = fp ) elif browser == 'chrome': options = webdriver.ChromeOptions() if os.getenv('CHROME_HEADLESS'): options.add_argument('headless') options.add_argument('disable-gpu') return webdriver.Chrome(chrome_options=options) elif browser == 'phantom' or browser == 'phantomjs': return webdriver.PhantomJS() elif browser == 'travis_saucelabs': username = os.environ["SAUCE_USERNAME"] access_key = os.environ["SAUCE_ACCESS_KEY"] #capabilities["tunnel-identifier"] = os.environ["TRAVIS_JOB_NUMBER"] hub_url = "%s:%s@localhost:4445" % (username, access_key) return webdriver.Remote(desired_capabilities=capabilities, command_executor="https://%s/wd/hub" % hub_url) else: raise Exception('invalid browser name', 'invalid browser name "%s"' % browser) def baseURL( self ): if not self.base_url: self.base_url = os.environ['JBROWSE_URL'] if 'JBROWSE_URL' in os.environ else "http://localhost/jbrowse/index.html" return self.base_url ## convenience methods for us def assert_element(self, expression , time=5): self._waits_for_element(expression, time*self.time_dilation) try: if expression.find('/') >= 0: el = self.browser.find_element_by_xpath(expression) else: el = self.browser.find_element_by_css_selector( expression ) except NoSuchElementException: raise AssertionError ("can't find %s" %expression) return el def assert_elements( self, expression ): self._waits_for_elements( expression, 3*self.time_dilation ) try: if '/' in expression: el = self.browser.find_elements_by_xpath( expression ) else: el = self.browser.find_elements_by_css_selector( expression ) except NoSuchElementException: raise AssertionError ("can't find %s" %expression) return el def 
assert_track( self, tracktext ): trackPath = "//div[contains(@class,'track-label')][contains(.,'%s')]" %tracktext self._waits_for_element( trackPath ) def assert_no_element( self, expression ): self._waits_for_no_element( expression ) def assert_no_js_errors( self ): assert self.browser.find_element_by_xpath('/html/body') \ .get_attribute('JSError') == None # Find the query box and put f15 into it and hit enter def do_typed_query( self, text ): qbox = self.browser.find_element_by_id("location") qbox.clear() qbox.send_keys( text ) qbox.send_keys( Keys.RETURN ) def _rubberband( self, el_xpath, start_pct, end_pct, modkey = None ): el = self.assert_element( el_xpath ) start_offset = el.size['width'] * start_pct - el.size['width']/2 c = self.actionchains() \ .move_to_element( el ) \ .move_by_offset( start_offset, 0 ) if( modkey ): c = c.key_down( modkey ) c = c \ .click_and_hold( None ) \ .move_by_offset( el.size['width']*(end_pct-start_pct), 0 ) \ .release( None ) if( modkey ): c = c.key_up( modkey ) c.perform() self.assert_no_js_errors() def export_track( self, track_name, region, file_format, button ): self.track_menu_click( track_name, 'Save') # test view export self.assert_element("//div[@id='exportDialog']//label[contains(.,'%s')]" % region ).click() self.assert_element("//div[@id='exportDialog']//label[contains(.,'%s')]" % file_format ).click() self.assert_element("//div[@id='exportDialog']//*[contains(@class,'dijitButton')]//*[contains(@class,'dijitButtonText')][contains(.,'%s')]" % button ).click() self.wait_for_dialog_disappearance() self.assert_no_js_errors() def close_dialog( self, title ): dialog = "//*[@class='dijitDialogTitle'][contains(text(),'%s')]/../span[contains(@class,'dijitDialogCloseIcon')]" % title self.assert_element(dialog).click() self.assert_no_element(dialog) self.wait_for_dialog_disappearance() self.assert_no_js_errors() def wait_for_dialog_appearance( self, t=5): #WebDriverWait(self, t).until(lambda self: not self.browser.find_element_by_css_selector( '.dijitDialogUnderlayWrapper').is_displayed()) time.sleep(1*self.time_dilation) #pass def wait_for_dialog_disappearance( self, t=5): #WebDriverWait(self, t).until(lambda self: not self.browser.find_element_by_css_selector( '.dijitDialogUnderlayWrapper').is_displayed()) time.sleep(1*self.time_dilation) #pass def track_menu_click( self, track_name, item_name ): menuButton = "//div[contains(@class,'track_%s')]//div[contains(@class,'track-label')]//div[contains(@class,'track-menu-button')]" \ % re.sub( '\W', '_', track_name.lower()) self.assert_element(menuButton).click() self.menu_item_click( item_name ) def menu_item_click( self, text ): menuItem = "//div[contains(@class,'dijitMenuPopup')][not(contains(@style,'display: none'))] \ //td[contains(@class,'dijitMenuItemLabel')][contains(.,'%s')]" % text self.assert_element(menuItem).click() def overview_rubberband( self, start_pct, end_pct ): """Executes a rubberband gesture from start_pct to end_pct on the overview bar""" self._rubberband( "//div[@id='overview']", start_pct, end_pct ) # I can't get a mainscale_rubberband() working, can't find an # element to tell selenium to move to that will hit it. can't # move to the scale itself because it's so wide. 
def trackpane_rubberband( self, start_pct, end_pct ): """Executes a rubberband gesture from start_pct to end_pct in the main track pane""" self._rubberband( "//div[contains(@class,'dragWindow')]", start_pct, end_pct, Keys.SHIFT ) def is_track_on( self, tracktext ): # find the track label in the track pane return self.does_element_exist( \ "//div[contains(@class,'track-label')]/span[contains(@class,'track-label-text')][contains(.,'%s')]" % tracktext ) def turn_on_track( self, tracktext ): return self.track_selector.turn_on_track( tracktext ) def turn_off_track( self, tracktext ): return self.track_selector.turn_off_track( tracktext ) def actionchains( self ): return ActionChains( self.browser ) def get_track_labels_containing( self, string ): return self.assert_elements( "//span[contains(@class,'track-label-text')][contains(.,'%s')]" % string ) def _waits_for_elements( self, expression, time=5): WebDriverWait(self, time*self.time_dilation).until(lambda self: self.do_elements_exist(expression)) def _waits_for_element( self, expression, time=5 ): WebDriverWait(self, time*self.time_dilation).until(lambda self: self.does_element_exist(expression)) def _waits_for_no_element( self, expression, time=5 ): WebDriverWait(self, time*self.time_dilation).until(lambda self: not self.does_element_exist(expression)) # Wait until faceted browser has narrowed results to one track def wait_until_one_track(self): WebDriverWait(self, 5*self.time_dilation).until(lambda self: self.is_one_row()) # Return true/false if faceted browser narrowed down to one track def is_one_row(self): return self.assert_elements("div.dojoxGridRow").__len__() == 1 # Return true/false if element exists def does_element_exist (self, expression): try: if expression.find('/') >= 0: self.browser.find_element_by_xpath( expression ) else: self.browser.find_element_by_css_selector( expression ) return True except NoSuchElementException: return False # Return true/false if elements exist def do_elements_exist (self, expression): try: if expression.find('/') >= 0: self.browser.find_elements_by_xpath( expression ) else: self.browser.find_elements_by_css_selector( expression ) return True except NoSuchElementException: return False def click_search_disambiguation( self, trackName, buttonText): self.wait_for_dialog_appearance() xpath = ( '//*[contains(@class,"dijitDialogPaneContent")]' '//td[contains(@class,"field-tracks")][contains(.,"%s")]' '/../td[contains(@class,"goButtonColumn")]' '//*[contains(@class,"dijitButton")][contains(.,"%s")]' ) % (trackName, buttonText) #print(xpath) self.assert_element(xpath).click() self.wait_for_dialog_disappearance() def select_refseq( self, name ): self.do_typed_query( name ) def scroll( self ): move_right_button = self.browser.find_element_by_id('moveRight') move_right_button.click() self.waits_for_scroll(self.browser.title) move_left_button = self.browser.find_element_by_id('moveLeft') move_left_button.click() self.waits_for_scroll(self.browser.title) self.assert_no_js_errors() # scroll back and forth with the mouse self.actionchains() \ .move_to_element( move_right_button ) \ .move_by_offset( 0, 300 ) \ .click_and_hold( None ) \ .move_by_offset( 300, 0 ) \ .release( None ) \ .move_by_offset( -100,100 ) \ .click_and_hold( None ) \ .move_by_offset( -300, 0 ) \ .release( None ) \ .perform() self.assert_no_js_errors() # waits for the title of the page to change, since it # gets updated after the scroll animation def waits_for_scroll ( self, location ): WebDriverWait(self, 5*self.time_dilation).until(lambda self: 
self.browser.title != location) #Exists because onload() gets triggered before JBrowse is ready def _waits_for_load(self): WebDriverWait(self.browser, 5*self.time_dilation).until(lambda self: "data=" in self.current_url or "js_tests" in self.current_url) if "data=nonexistent" in self.browser.current_url: #account for the test for bad data pass elif "js_tests" in self.browser.current_url: #account for jasmine tests pass else: self.waits_for_scroll("JBrowse")
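A hypothetical concrete test built on this mixin (not part of the original file); it assumes a running JBrowse instance reachable through the JBROWSE_URL environment variable, and the dataset path and track name are placeholders.

import unittest

class VolvoxNavigationTest(JBrowseTest, unittest.TestCase):
    data_dir = 'sample_data/json/volvox'   # hypothetical dataset

    def test_basic_navigation(self):
        self.do_typed_query('ctgA:1..20000')
        self.turn_on_track('ExampleFeatures')    # hypothetical track name
        self.assert_track('ExampleFeatures')
        self.overview_rubberband(0.3, 0.6)
        self.assert_no_js_errors()

if __name__ == '__main__':
    unittest.main()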
lgpl-2.1
alfkjartan/nvgimu
nvg/maths/matrices.py
2
6689
""" Utilities for working with matrices. """ # Copyright (C) 2009-2011 University of Edinburgh # # This file is part of IMUSim. # # IMUSim is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # IMUSim is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with IMUSim. If not, see <http://www.gnu.org/licenses/>. from __future__ import division import numpy as np import math import operator from itertools import izip _rotationMatrices = dict( x = lambda rx: np.matrix(( (1,0,0), (0,math.cos(rx),-math.sin(rx)), (0,math.sin(rx),math.cos(rx))),dtype=float), y = lambda ry: np.matrix(( (math.cos(ry),0,math.sin(ry)), (0,1,0), (-math.sin(ry),0,math.cos(ry))),dtype=float), z = lambda rz: np.matrix(( (math.cos(rz),-math.sin(rz),0), (math.sin(rz),math.cos(rz),0), (0,0,1)),dtype=float)) _EPS = 1e-12 _eulerFuncs = dict( xyz = lambda m: \ (np.arctan2(-m[1,2], m[2,2]), np.arcsin(m[0,2]), np.arctan2(-m[0,1], m[0,0])) if abs(m[0,2]) < 1 - _EPS \ else (np.arctan2(m[1,0], m[1,1]), np.pi/2, 0) if m[0,2] > 0 \ else (-np.arctan2(m[1,0], m[1,1]), -np.pi/2, 0), xzy = lambda m: \ (np.arctan2(m[2,1], m[1,1]), np.arcsin(-m[0,1]), np.arctan2(m[0,2], m[0,0])) if abs(m[0,1]) < 1 - _EPS \ else (np.arctan2(-m[2,0], m[2,2]), -np.pi/2, 0) if m[0,1] > 0 \ else (-np.arctan2(-m[2,0], m[2,2]), np.pi/2, 0), yxz = lambda m: \ (np.arctan2(m[0,2], m[2,2]), np.arcsin(-m[1,2]), np.arctan2(m[1,0], m[1,1])) if abs(m[1,2]) < 1 - _EPS \ else (np.arctan2(-m[0,1], m[0,0]), -np.pi/2, 0) if m[1,2] > 0 \ else (-np.arctan2(-m[0,1], m[0,0]), np.pi/2, 0), yzx = lambda m: \ (np.arctan2(-m[2,0], m[0,0]), np.arcsin(m[1,0]), np.arctan2(-m[1,2], m[1,1])) if abs(m[1,0]) < 1 - _EPS \ else (np.arctan2(m[2,1], m[2,2]), np.pi/2, 0) if m[1,0] > 0 \ else (-np.arctan2(m[2,1], m[2,2]), -np.pi/2, 0), zxy = lambda m: \ (np.arctan2(-m[0,1], m[1,1]), np.arcsin(m[2,1]), np.arctan2(-m[2,0], m[2,2])) if abs(m[2,1]) < 1 - _EPS \ else (np.arctan2(m[0,2], m[0,0]), np.pi/2, 0) if m[2,1] > 0 \ else (-np.arctan2(m[0,2], m[0,0]), -np.pi/2, 0), zyx = lambda m: \ (np.arctan2(m[1,0], m[0,0]), np.arcsin(-m[2,0]), np.arctan2(m[2,1], m[2,2])) if abs(m[2,0]) < 1 - _EPS \ else (np.arctan2(-m[1,2], m[1,1]), -np.pi/2, 0) if m[2,0] > 0 \ else (-np.arctan2(-m[1,2], m[1,1]), np.pi/2, 0), xyx = lambda m: \ (np.arctan2(m[1,0], -m[2,0]), np.arccos(m[0,0]), np.arctan2(m[0,1], m[0,2])) if abs(m[0,0]) < 1 - _EPS \ else (np.arctan2(-m[1,2], m[1,1]), 0, 0) if m[0,0] > 0 \ else (-np.arctan2(-m[1,2], m[1,1]), np.pi, 0), xzx = lambda m: \ (np.arctan2(m[2,0], m[1,0]), np.arccos(m[0,0]), np.arctan2(m[0,2], -m[0,1])) if abs(m[0,0]) < 1 - _EPS \ else (np.arctan2(m[2,1], m[2,2]), 0, 0) if m[0,0] > 0 \ else (-np.arctan2(m[2,1], m[2,2]), np.pi, 0), yxy = lambda m: \ (np.arctan2(m[0,1], m[2,1]), np.arccos(m[1,1]), np.arctan2(m[1,0], -m[1,2])) if abs(m[1,1]) < 1 - _EPS \ else (np.arctan2(m[0,2], m[0,0]), 0, 0) if m[1,1] > 0 \ else (-np.arctan2(m[0,2], m[0,0]), np.pi, 0), yzy = lambda m: \ (np.arctan2(m[2,1], -m[0,1]), np.arccos(m[1,1]), np.arctan2(m[1,2], m[1,0])) if abs(m[1,1]) < 1 - _EPS \ else (np.arctan2(-m[2,0], m[2,2]), 0, 0) if m[1,1] > 0 \ else (-np.arctan2(-m[2,0], m[2,2]), 
np.pi, 0), zxz = lambda m: \ (np.arctan2(m[0,2], -m[1,2]), np.arccos(m[2,2]), np.arctan2(m[2,0], m[2,1])) if abs(m[2,2]) < 1 - _EPS \ else (np.arctan2(-m[0,1], m[0,0]), 0, 0) if m[2,2] > 0 \ else (-np.arctan2(-m[0,1], m[0,0]), np.pi, 0), zyz = lambda m: \ (np.arctan2(m[1,2], m[0,2]), np.arccos(m[2,2]), np.arctan2(m[2,1], -m[2,0])) if abs(m[2,2]) < 1 - _EPS \ else (np.arctan2(m[1,0], m[1,1]), 0, 0) if m[2,2] > 0 \ else (-np.arctan2(m[1,0], m[1,1]), np.pi, 0), xy = lambda m: (np.arctan2(m[2,1], m[1,1]), np.arctan2(m[0,2], m[0,0])), xz = lambda m: (np.arctan2(-m[1,2], m[2,2]), np.arctan2(-m[0,1], m[0,0])), yx = lambda m: (np.arctan2(-m[2,0], m[0,0]), np.arctan2(-m[1,2], m[1,1])), yz = lambda m: (np.arctan2(m[0,2], m[2,2]), np.arctan2(m[1,0], m[1,1])), zx = lambda m: (np.arctan2(m[1,0], m[0,0]), np.arctan2(m[2,1], m[2,2])), zy = lambda m: (np.arctan2(-m[0,1], m[1,1]), np.arctan2(-m[2,0], m[2,2])), x = lambda m: (np.arctan2(m[2,1], m[2,2]),), y = lambda m: (np.arctan2(m[0,2], m[0,0]),), z = lambda m: (np.arctan2(m[1,0], m[1,1]),)) def matrixToEuler(m,order='zyx',inDegrees=True): """ Convert a 3x3 rotation matrix to an Euler angle sequence. @param m: 3x3 L{np.matrix}, or equivalent, to convert. @param order: The order of the Euler angle sequence, e.g. 'zyx' @param inDegrees: True to return result in degrees, False for radians. @return: L{np.ndarray} of Euler angles in specified order. """ order = order.lower() if order not in _eulerFuncs.keys(): raise NotImplementedError, "Order %s not implemented" % order result = np.array(_eulerFuncs[order](m)) if inDegrees: return np.degrees(result) else: return result def matrixFromEuler(angles, order, inDegrees=True): """ Generate a rotation matrix from an Euler angle sequence. @param angles: Sequence of Euler rotation angles. @param order: Sequence of rotation axes. Rotations are applied sequentially from left to right, i.e. the string 'zyx' would result in rotation about the Z axis, then the new Y axis, and finally about the new X axis. @param inDegrees: Whether the angles are in degrees (`True`) or radians (`False`) @return: 3x3 rotation matrix corresponding to the Euler angle sequence. """ assert len(angles) == len(order) if inDegrees: angles = np.radians(angles) return reduce(operator.mul, (_rotationMatrices[axis](angle) for axis,angle in izip(order.lower(), angles)))
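A small round-trip check of the two conversion functions above (not part of the original file), assuming this module has been imported:

import numpy as np

angles = [30.0, -45.0, 10.0]
m = matrixFromEuler(angles, order='zyx', inDegrees=True)    # 3x3 rotation matrix
recovered = matrixToEuler(m, order='zyx', inDegrees=True)
assert np.allclose(recovered, angles)

# Two-axis and single-axis orders are also supported.
mz = matrixFromEuler([90.0], order='z')
print(matrixToEuler(mz, order='z'))                         # -> [ 90.]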
gpl-3.0
ppries/tensorflow
tensorflow/contrib/bayesflow/python/kernel_tests/monte_carlo_test.py
23
6678
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for Monte Carlo Ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf distributions = tf.contrib.distributions layers = tf.contrib.layers monte_carlo = tf.contrib.bayesflow.monte_carlo class ExpectationImportanceSampleTest(tf.test.TestCase): def test_normal_integral_mean_and_var_correctly_estimated(self): n = int(1e6) with self.test_session(): mu_p = tf.constant([-1.0, 1.0], dtype=tf.float64) mu_q = tf.constant([0.0, 0.0], dtype=tf.float64) sigma_p = tf.constant([0.5, 0.5], dtype=tf.float64) sigma_q = tf.constant([1.0, 1.0], dtype=tf.float64) p = distributions.Normal(mu=mu_p, sigma=sigma_p) q = distributions.Normal(mu=mu_q, sigma=sigma_q) # Compute E_p[X]. e_x = monte_carlo.expectation_importance_sampler( f=lambda x: x, log_p=p.log_prob, sampling_dist_q=q, n=n, seed=42) # Compute E_p[X^2]. e_x2 = monte_carlo.expectation_importance_sampler( f=tf.square, log_p=p.log_prob, sampling_dist_q=q, n=n, seed=42) stdev = tf.sqrt(e_x2 - tf.square(e_x)) # Relative tolerance (rtol) chosen 2 times as large as minimim needed to # pass. # Convergence of mean is +- 0.003 if n = 100M # Convergence of std is +- 0.00001 if n = 100M self.assertEqual(p.get_batch_shape(), e_x.get_shape()) self.assertAllClose(p.mean().eval(), e_x.eval(), rtol=0.01) self.assertAllClose(p.std().eval(), stdev.eval(), rtol=0.02) def test_multivariate_normal_prob_positive_product_of_components(self): # Test that importance sampling can correctly estimate the probability that # the product of components in a MultivariateNormal are > 0. n = 1000 with self.test_session(): p = distributions.MultivariateNormalDiag( mu=[0.0, 0.0], diag_stdev=[1.0, 1.0]) q = distributions.MultivariateNormalDiag( mu=[0.5, 0.5], diag_stdev=[3., 3.]) # Compute E_p[X_1 * X_2 > 0], with X_i the ith component of X ~ p(x). # Should equal 1/2 because p is a spherical Gaussian centered at (0, 0). def indicator(x): x1_times_x2 = tf.reduce_prod(x, reduction_indices=[-1]) return 0.5 * (tf.sign(x1_times_x2) + 1.0) prob = monte_carlo.expectation_importance_sampler( f=indicator, log_p=p.log_prob, sampling_dist_q=q, n=n, seed=42) # Relative tolerance (rtol) chosen 2 times as large as minimim needed to # pass. # Convergence is +- 0.004 if n = 100k. self.assertEqual(p.get_batch_shape(), prob.get_shape()) self.assertAllClose(0.5, prob.eval(), rtol=0.05) class ExpectationImportanceSampleLogspaceTest(tf.test.TestCase): def test_normal_distribution_second_moment_estimated_correctly(self): # Test the importance sampled estimate against an analytical result. 
n = int(1e6) with self.test_session(): mu_p = tf.constant([0.0, 0.0], dtype=tf.float64) mu_q = tf.constant([-1.0, 1.0], dtype=tf.float64) sigma_p = tf.constant([1.0, 2 / 3.], dtype=tf.float64) sigma_q = tf.constant([1.0, 1.0], dtype=tf.float64) p = distributions.Normal(mu=mu_p, sigma=sigma_p) q = distributions.Normal(mu=mu_q, sigma=sigma_q) # Compute E_p[X^2]. # Should equal [1, (2/3)^2] log_e_x2 = monte_carlo.expectation_importance_sampler_logspace( log_f=lambda x: tf.log(tf.square(x)), log_p=p.log_prob, sampling_dist_q=q, n=n, seed=42) e_x2 = tf.exp(log_e_x2) # Relative tolerance (rtol) chosen 2 times as large as minimim needed to # pass. self.assertEqual(p.get_batch_shape(), e_x2.get_shape()) self.assertAllClose([1., (2 / 3.)**2], e_x2.eval(), rtol=0.02) class ExpectationTest(tf.test.TestCase): def test_mc_estimate_of_normal_mean_and_variance_is_correct_vs_analytic(self): tf.set_random_seed(0) n = 20000 with self.test_session(): p = distributions.Normal(mu=[1.0, -1.0], sigma=[0.3, 0.5]) # Compute E_p[X] and E_p[X^2]. z = p.sample_n(n=n) e_x = monte_carlo.expectation(lambda x: x, p, z=z, seed=42) e_x2 = monte_carlo.expectation(tf.square, p, z=z, seed=0) var = e_x2 - tf.square(e_x) self.assertEqual(p.get_batch_shape(), e_x.get_shape()) self.assertEqual(p.get_batch_shape(), e_x2.get_shape()) # Relative tolerance (rtol) chosen 2 times as large as minimim needed to # pass. self.assertAllClose(p.mean().eval(), e_x.eval(), rtol=0.01) self.assertAllClose(p.variance().eval(), var.eval(), rtol=0.02) class GetSamplesTest(tf.test.TestCase): """Test the private method 'get_samples'.""" def test_raises_if_both_z_and_n_are_none(self): with self.test_session(): dist = distributions.Normal(mu=0., sigma=1.) z = None n = None seed = None with self.assertRaisesRegexp(ValueError, 'exactly one'): monte_carlo._get_samples(dist, z, n, seed) def test_raises_if_both_z_and_n_are_not_none(self): with self.test_session(): dist = distributions.Normal(mu=0., sigma=1.) z = dist.sample_n(n=1) n = 1 seed = None with self.assertRaisesRegexp(ValueError, 'exactly one'): monte_carlo._get_samples(dist, z, n, seed) def test_returns_n_samples_if_n_provided(self): with self.test_session(): dist = distributions.Normal(mu=0., sigma=1.) z = None n = 10 seed = None z = monte_carlo._get_samples(dist, z, n, seed) self.assertEqual((10,), z.get_shape()) def test_returns_z_if_z_provided(self): with self.test_session(): dist = distributions.Normal(mu=0., sigma=1.) z = dist.sample_n(n=10) n = None seed = None z = monte_carlo._get_samples(dist, z, n, seed) self.assertEqual((10,), z.get_shape()) if __name__ == '__main__': tf.test.main()
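The quantity these tests exercise is the importance-sampling identity E_p[f(X)] = E_q[f(X) p(X)/q(X)]. A plain NumPy sketch of the same estimator, independent of the TensorFlow ops under test (all numbers are illustrative):

import numpy as np

rng = np.random.RandomState(42)
n = 1000000
mu_p, sigma_p = -1.0, 0.5     # target distribution p
mu_q, sigma_q = 0.0, 1.0      # proposal distribution q

x = rng.normal(mu_q, sigma_q, size=n)                              # x_i ~ q
log_w = (-(x - mu_p) ** 2 / (2 * sigma_p ** 2) - np.log(sigma_p)) \
        - (-(x - mu_q) ** 2 / (2 * sigma_q ** 2) - np.log(sigma_q))
w = np.exp(log_w)                                                  # p(x_i) / q(x_i)
print(np.mean(x * w))                                              # ~ E_p[X] = -1.0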
apache-2.0
jantman/nagios-scripts
check_icinga_ido.py
1
6939
#!/usr/bin/env python """ Script to check last update of core programstatus and service checks in Icinga ido2db Postgres database """ # # The latest version of this script lives at: # <https://github.com/jantman/nagios-scripts/blob/master/check_puppetdb_agent_run.py> # # Please file bug/feature requests and submit patches through # the above GitHub repository. Feedback and patches are greatly # appreciated; patches are preferred as GitHub pull requests, but # emailed patches are also accepted. # # Copyright 2014 Jason Antman <[email protected]> all rights reserved. # See the above git repository's LICENSE file for license terms (GPLv3). # import sys from datetime import datetime import pytz import logging import argparse from math import ceil import nagiosplugin import psycopg2 import pprint _log = logging.getLogger('nagiosplugin') utc = pytz.utc class IdoStatus(nagiosplugin.Resource): """Check age of ido2db programstatus and last service check in postgres database""" def __init__(self, db_host, db_name, db_user, db_pass, db_port=5432): self.db_host = db_host self.db_user = db_user self.db_pass = db_pass self.db_port = db_port self.db_name = db_name def probe(self): _log.info("connecting to Postgres DB %s on %s" % (self.db_name, self.db_host)) try: conn_str = "dbname='%s' user='%s' host='%s' password='%s' port='%s' application_name='%s'" % ( self.db_name, self.db_user, self.db_host, self.db_pass, self.db_port, "check_icinga_ido_core.py", ) _log.debug("psycopg2 connect string: %s" % conn_str) conn = psycopg2.connect(conn_str) except psycopg2.OperationalError, e: _log.info("got psycopg2.OperationalError: %s" % e.__str__()) raise nagiosplugin.CheckError(e.__str__()) _log.info("connected to database") # these queries come from https://wiki.icinga.org/display/testing/Special+IDOUtils+Queries cur = conn.cursor() _log.debug("got cursor") sql = "SELECT EXTRACT(EPOCH FROM (NOW()-status_update_time)) AS age from icinga_programstatus where (UNIX_TIMESTAMP(status_update_time) > UNIX_TIMESTAMP(NOW())-60);" _log.debug("executing query: %s" % sql) cur.execute(sql) row = cur.fetchone() _log.debug("result: %s" % row) programstatus_age = ceil(row[0]) sql = "select (UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(ss.status_update_time)) as age from icinga_servicestatus ss join icinga_objects os on os.object_id=ss.service_object_id order by status_update_time desc limit 1;" _log.debug("executing query: %s" % sql) cur.execute(sql) row = cur.fetchone() _log.debug("result: %s" % row) last_check_age = ceil(row[0]) return [ nagiosplugin.Metric('programstatus_age', programstatus_age, uom='s', min=0), nagiosplugin.Metric('last_check_age', last_check_age, uom='s', min=0), ] class LoadSummary(nagiosplugin.Summary): """LoadSummary is used to provide custom outputs to the check""" def __init__(self, db_name): self.db_name = db_name def _human_time(self, seconds): """convert an integer seconds into human-readable hms""" mins, secs = divmod(seconds, 60) hours, mins = divmod(mins, 60) return '%02d:%02d:%02d' % (hours, mins, secs) def _state_marker(self, state): """return a textual marker for result states""" if type(state) == type(nagiosplugin.state.Critical): return " (Crit)" if type(state) == type(nagiosplugin.state.Warn): return " (Warn)" if type(state) == type(nagiosplugin.state.Unknown): return " (Unk)" return "" def status_line(self, results): if type(results.most_significant_state) == type(nagiosplugin.state.Unknown): # won't have perf values, so special handling return results.most_significant[0].hint.splitlines()[0] return 
"Last Programstatus Update %s ago%s; Last Service Status Update %s ago%s (%s)" % ( self._human_time(results['programstatus_age'].metric.value), self._state_marker(results['programstatus_age'].state), self._human_time(results['last_check_age'].metric.value), self._state_marker(results['last_check_age'].state), self.db_name) def ok(self, results): return self.status_line(results) def problem(self, results): return self.status_line(results) @nagiosplugin.guarded def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-H', '--hostname', dest='hostname', help='Postgres server hostname') parser.add_argument('-p', '--port', dest='port', default='5432', help='Postgres port (Default: 5432)') parser.add_argument('-u', '--username', dest='username', default='icinga-ido', help='Postgres username (Default: icinga-ido)') parser.add_argument('-a', '--password', dest='password', default='icinga', help='Postgres password (Default: icinga)') parser.add_argument('-n', '--db-name', dest='db_name', default='icinga_ido', help='Postgres database name (Default: icinga_ido)') parser.add_argument('-w', '--warning', dest='warning', default='120', help='warning threshold for age of last programstatus or service status update, in seconds (Default: 120 / 2m)') parser.add_argument('-c', '--critical', dest='critical', default='600', help='critical threshold for age of last programstatus or service status update, in seconds (Default: 600 / 10m)') parser.add_argument('-v', '--verbose', action='count', default=0, help='increase output verbosity (use up to 3 times)') parser.add_argument('-t', '--timeout', dest='timeout', default=30, help='timeout (in seconds) for the command (Default: 30)') args = parser.parse_args() if not args.hostname: raise nagiosplugin.CheckError('hostname (-H|--hostname) must be provided') check = nagiosplugin.Check( IdoStatus(args.hostname, args.db_name, args.username, args.password, args.port), nagiosplugin.ScalarContext('programstatus_age', args.warning, args.critical), nagiosplugin.ScalarContext('last_check_age', args.warning, args.critical), LoadSummary(args.db_name)) check.main(args.verbose, args.timeout) if __name__ == '__main__': main()
gpl-3.0
google/contentbox
third_party/requests/cookies.py
821
16686
# -*- coding: utf-8 -*- """ Compatibility code to be able to use `cookielib.CookieJar` with requests. requests.utils imports from here, so be careful with imports. """ import time import collections from .compat import cookielib, urlparse, urlunparse, Morsel try: import threading # grr, pyflakes: this fixes "redefinition of unused 'threading'" threading except ImportError: import dummy_threading as threading class MockRequest(object): """Wraps a `requests.Request` to mimic a `urllib2.Request`. The code in `cookielib.CookieJar` expects this interface in order to correctly manage cookie policies, i.e., determine whether a cookie can be set, given the domains of the request and the cookie. The original request object is read-only. The client is responsible for collecting the new headers via `get_new_headers()` and interpreting them appropriately. You probably want `get_cookie_header`, defined below. """ def __init__(self, request): self._r = request self._new_headers = {} self.type = urlparse(self._r.url).scheme def get_type(self): return self.type def get_host(self): return urlparse(self._r.url).netloc def get_origin_req_host(self): return self.get_host() def get_full_url(self): # Only return the response's URL if the user hadn't set the Host # header if not self._r.headers.get('Host'): return self._r.url # If they did set it, retrieve it and reconstruct the expected domain host = self._r.headers['Host'] parsed = urlparse(self._r.url) # Reconstruct the URL as we expect it return urlunparse([ parsed.scheme, host, parsed.path, parsed.params, parsed.query, parsed.fragment ]) def is_unverifiable(self): return True def has_header(self, name): return name in self._r.headers or name in self._new_headers def get_header(self, name, default=None): return self._r.headers.get(name, self._new_headers.get(name, default)) def add_header(self, key, val): """cookielib has no legitimate use for this method; add it back if you find one.""" raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") def add_unredirected_header(self, name, value): self._new_headers[name] = value def get_new_headers(self): return self._new_headers @property def unverifiable(self): return self.is_unverifiable() @property def origin_req_host(self): return self.get_origin_req_host() @property def host(self): return self.get_host() class MockResponse(object): """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. ...what? Basically, expose the parsed HTTP headers from the server response the way `cookielib` expects to see them. """ def __init__(self, headers): """Make a MockResponse for `cookielib` to read. :param headers: a httplib.HTTPMessage or analogous carrying the headers """ self._headers = headers def info(self): return self._headers def getheaders(self, name): self._headers.getheaders(name) def extract_cookies_to_jar(jar, request, response): """Extract the cookies from the response into a CookieJar. 
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) :param request: our own requests.Request object :param response: urllib3.HTTPResponse object """ if not (hasattr(response, '_original_response') and response._original_response): return # the _original_response field is the wrapped httplib.HTTPResponse object, req = MockRequest(request) # pull out the HTTPMessage with the headers and put it in the mock: res = MockResponse(response._original_response.msg) jar.extract_cookies(res, req) def get_cookie_header(jar, request): """Produce an appropriate Cookie header string to be sent with `request`, or None.""" r = MockRequest(request) jar.add_cookie_header(r) return r.get_new_headers().get('Cookie') def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """Unsets a cookie by name, by default over all domains and paths. Wraps CookieJar.clear(), is O(n). """ clearables = [] for cookie in cookiejar: if cookie.name == name: if domain is None or domain == cookie.domain: if path is None or path == cookie.path: clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name) class CookieConflictError(RuntimeError): """There are two cookies that meet the criteria specified in the cookie jar. Use .get and .set and include domain and path args in order to be more specific.""" class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface. This is the CookieJar we create by default for requests and sessions that don't specify one, since some clients may expect response.cookies and session.cookies to support dict operations. Don't use the dict interface internally; it's just for compatibility with with external client code. All `requests` code should work out of the box with externally provided instances of CookieJar, e.g., LWPCookieJar and FileCookieJar. Caution: dictionary operations that are normally O(1) may be O(n). Unlike a regular CookieJar, this class is pickleable. """ def get(self, name, default=None, domain=None, path=None): """Dict-like get() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over multiple domains. Caution: operation is O(n), not O(1).""" try: return self._find_no_duplicates(name, domain, path) except KeyError: return default def set(self, name, value, **kwargs): """Dict-like set() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over multiple domains.""" # support client code that unsets cookies by assignment of a None value: if value is None: remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) return if isinstance(value, Morsel): c = morsel_to_cookie(value) else: c = create_cookie(name, value, **kwargs) self.set_cookie(c) return c def iterkeys(self): """Dict-like iterkeys() that returns an iterator of names of cookies from the jar. See itervalues() and iteritems().""" for cookie in iter(self): yield cookie.name def keys(self): """Dict-like keys() that returns a list of names of cookies from the jar. See values() and items().""" return list(self.iterkeys()) def itervalues(self): """Dict-like itervalues() that returns an iterator of values of cookies from the jar. 
See iterkeys() and iteritems().""" for cookie in iter(self): yield cookie.value def values(self): """Dict-like values() that returns a list of values of cookies from the jar. See keys() and items().""" return list(self.itervalues()) def iteritems(self): """Dict-like iteritems() that returns an iterator of name-value tuples from the jar. See iterkeys() and itervalues().""" for cookie in iter(self): yield cookie.name, cookie.value def items(self): """Dict-like items() that returns a list of name-value tuples from the jar. See keys() and values(). Allows client-code to call "dict(RequestsCookieJar) and get a vanilla python dict of key value pairs.""" return list(self.iteritems()) def list_domains(self): """Utility method to list all the domains in the jar.""" domains = [] for cookie in iter(self): if cookie.domain not in domains: domains.append(cookie.domain) return domains def list_paths(self): """Utility method to list all the paths in the jar.""" paths = [] for cookie in iter(self): if cookie.path not in paths: paths.append(cookie.path) return paths def multiple_domains(self): """Returns True if there are multiple domains in the jar. Returns False otherwise.""" domains = [] for cookie in iter(self): if cookie.domain is not None and cookie.domain in domains: return True domains.append(cookie.domain) return False # there is only one domain in jar def get_dict(self, domain=None, path=None): """Takes as an argument an optional domain and path and returns a plain old Python dict of name-value pairs of cookies that meet the requirements.""" dictionary = {} for cookie in iter(self): if (domain is None or cookie.domain == domain) and (path is None or cookie.path == path): dictionary[cookie.name] = cookie.value return dictionary def __getitem__(self, name): """Dict-like __getitem__() for compatibility with client code. Throws exception if there are more than one cookie with name. In that case, use the more explicit get() method instead. Caution: operation is O(n), not O(1).""" return self._find_no_duplicates(name) def __setitem__(self, name, value): """Dict-like __setitem__ for compatibility with client code. Throws exception if there is already a cookie of that name in the jar. In that case, use the more explicit set() method instead.""" self.set(name, value) def __delitem__(self, name): """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name().""" remove_cookie_by_name(self, name) def set_cookie(self, cookie, *args, **kwargs): if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): cookie.value = cookie.value.replace('\\"', '') return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) def update(self, other): """Updates this jar with cookies from another CookieJar or dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: self.set_cookie(cookie) else: super(RequestsCookieJar, self).update(other) def _find(self, name, domain=None, path=None): """Requests uses this method internally to get cookie values. Takes as args name and optional domain and path. Returns a cookie.value. If there are conflicting cookies, _find arbitrarily chooses one. 
See _find_no_duplicates if you want an exception thrown if there are conflicting cookies.""" for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: return cookie.value raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) def _find_no_duplicates(self, name, domain=None, path=None): """__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path.""" toReturn = None for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if toReturn is not None: # if there are multiple cookies that meet passed in criteria raise CookieConflictError('There are multiple cookies with name, %r' % (name)) toReturn = cookie.value # we will eventually return this as long as no cookie conflict if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) def __getstate__(self): """Unlike a normal CookieJar, this class is pickleable.""" state = self.__dict__.copy() # remove the unpickleable RLock object state.pop('_cookies_lock') return state def __setstate__(self, state): """Unlike a normal CookieJar, this class is pickleable.""" self.__dict__.update(state) if '_cookies_lock' not in self.__dict__: self._cookies_lock = threading.RLock() def copy(self): """Return a copy of this RequestsCookieJar.""" new_cj = RequestsCookieJar() new_cj.update(self) return new_cj def create_cookie(name, value, **kwargs): """Make a cookie from underspecified parameters. By default, the pair of `name` and `value` will be set for the domain '' and sent on every request (this is sometimes called a "supercookie"). """ result = dict( version=0, name=name, value=value, port=None, domain='', path='/', secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False,) badargs = set(kwargs) - set(result) if badargs: err = 'create_cookie() got unexpected keyword arguments: %s' raise TypeError(err % list(badargs)) result.update(kwargs) result['port_specified'] = bool(result['port']) result['domain_specified'] = bool(result['domain']) result['domain_initial_dot'] = result['domain'].startswith('.') result['path_specified'] = bool(result['path']) return cookielib.Cookie(**result) def morsel_to_cookie(morsel): """Convert a Morsel object into a Cookie containing the one k/v pair.""" expires = None if morsel['max-age']: expires = time.time() + morsel['max-age'] elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' expires = time.mktime( time.strptime(morsel['expires'], time_template)) - time.timezone return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0, ) def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): """Returns a CookieJar from a key/value dictionary. :param cookie_dict: Dict of key/values to insert into CookieJar. :param cookiejar: (optional) A cookiejar to add the cookies to. 
:param overwrite: (optional) If False, will not replace cookies already in the jar with new ones. """ if cookiejar is None: cookiejar = RequestsCookieJar() if cookie_dict is not None: names_from_jar = [cookie.name for cookie in cookiejar] for name in cookie_dict: if overwrite or (name not in names_from_jar): cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) return cookiejar def merge_cookies(cookiejar, cookies): """Add cookies to cookiejar and returns a merged CookieJar. :param cookiejar: CookieJar object to add the cookies to. :param cookies: Dictionary or CookieJar object to be added. """ if not isinstance(cookiejar, cookielib.CookieJar): raise ValueError('You can only merge into CookieJar') if isinstance(cookies, dict): cookiejar = cookiejar_from_dict( cookies, cookiejar=cookiejar, overwrite=False) elif isinstance(cookies, cookielib.CookieJar): try: cookiejar.update(cookies) except AttributeError: for cookie_in_jar in cookies: cookiejar.set_cookie(cookie_in_jar) return cookiejar
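A short sketch of the dict-style interface defined above (not part of the original file); the cookie names, values and domains are made up:

jar = cookiejar_from_dict({'session': 'abc123'})
jar.set('theme', 'dark', domain='example.com', path='/')
jar.set('theme', 'light', domain='other.example', path='/')

jar['session']                            # 'abc123' -- dict lookup, O(n)
jar.get('theme', domain='example.com')    # 'dark'; plain jar['theme'] would raise
                                          # CookieConflictError (two cookies share the name)
jar.get_dict(domain='example.com')        # {'theme': 'dark'}

merged = merge_cookies(RequestsCookieJar(), {'other': '1'})   # dict merged into a fresh jar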
apache-2.0
tseaver/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/transports/event_service_grpc_transport.py
2
4874
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core.grpc_helpers from google.cloud.talent_v4beta1.proto import event_service_pb2_grpc class EventServiceGrpcTransport(object): """gRPC transport class providing stubs for google.cloud.talent.v4beta1 EventService API. The transport provides access to the raw gRPC stubs, which can be used to take advantage of advanced features of gRPC. """ # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/jobs", ) def __init__( self, channel=None, credentials=None, address="jobs.googleapis.com:443" ): """Instantiate the transport class. Args: channel (grpc.Channel): A ``Channel`` instance through which to make calls. This argument is mutually exclusive with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. address (str): The address where the service is hosted. """ # If both `channel` and `credentials` are specified, raise an # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( "The `channel` and `credentials` arguments are mutually " "exclusive." ) # Create the channel. if channel is None: channel = self.create_channel( address=address, credentials=credentials, options={ "grpc.max_send_message_length": -1, "grpc.max_receive_message_length": -1, }.items(), ) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { "event_service_stub": event_service_pb2_grpc.EventServiceStub(channel) } @classmethod def create_channel( cls, address="jobs.googleapis.com:443", credentials=None, **kwargs ): """Create and return a gRPC channel object. Args: address (str): The host for the channel to use. credentials (~.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. kwargs (dict): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property def channel(self): """The gRPC channel used by the transport. Returns: grpc.Channel: A gRPC channel object. """ return self._channel @property def create_client_event(self): """Return the gRPC stub for :meth:`EventServiceClient.create_client_event`. Report events issued when end user interacts with customer's application that uses Cloud Talent Solution. 
You may inspect the created events in `self service tools <https://console.cloud.google.com/talent-solution/overview>`__. `Learn more <https://cloud.google.com/talent-solution/docs/management-tools>`__ about self service tools. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ return self._stubs["event_service_stub"].CreateClientEvent
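A minimal construction sketch (not part of the original file); it assumes Application Default Credentials are available in the environment:

from google.cloud.talent_v4beta1.gapic.transports import event_service_grpc_transport

# Credentials picked up from the environment; address defaults to jobs.googleapis.com:443.
transport = event_service_grpc_transport.EventServiceGrpcTransport()

# Alternatively, build the channel explicitly -- but never pass both `channel`
# and `credentials`, which raises ValueError.
channel = event_service_grpc_transport.EventServiceGrpcTransport.create_channel()
transport = event_service_grpc_transport.EventServiceGrpcTransport(channel=channel)

create_event = transport.create_client_event   # gRPC callable for CreateClientEvent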
apache-2.0
rpm-software-management/yum-utils
repomanage.py
7
7043
#!/usr/bin/python # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # (c) Copyright Seth Vidal 2004 # need hdropen, dir traversing, version comparison, and getopt (eventually) # this should take a dir, traverse it - build a dict of foo[(name, arch)] = [/path/to/file/that/is/highest, /path/to/equalfile] import os import sys import rpm import fnmatch import string import rpmUtils from yum import misc from optparse import OptionParser def errorprint(stuff): print >> sys.stderr, stuff def getFileList(path, ext, filelist): """Return all files in path matching ext, store them in filelist, recurse dirs return list object""" extlen = len(ext) try: dir_list = os.listdir(path) except OSError, e: errorprint('Error accessing directory %s, %s' % (path, str(e))) return [] for d in dir_list: if os.path.isdir(path + '/' + d): filelist = getFileList(path + '/' + d, ext, filelist) else: if string.lower(d[-extlen:]) == '%s' % (ext): newpath = os.path.normpath(path + '/' + d) filelist.append(newpath) return filelist def trimRpms(rpms, excludeGlobs): # print 'Pre-Trim Len: %d' % len(rpms) badrpms = [] for fn in rpms: for glob in excludeGlobs: if fnmatch.fnmatch(fn, glob): # print 'excluded: %s' % fn if fn not in badrpms: badrpms.append(fn) for fn in badrpms: if fn in rpms: rpms.remove(fn) # print 'Post-Trim Len: %d' % len(rpms) return rpms def parseargs(args): usage = """ repomanage: manage a directory of rpm packages. returns lists of newest or oldest packages in a directory for easy piping to xargs or similar programs. repomanage [--old] [--new] path. """ parser = OptionParser(usage=usage) # new is only used to make sure that the user is not trying to get both # new and old, after this old and not old will be used. 
# (default = not old = new) parser.add_option("-o", "--old", default=False, action="store_true", help='print the older packages') parser.add_option("-n", "--new", default=False, action="store_true", help='print the newest packages') parser.add_option("-s", "--space", default=False, action="store_true", help='space separated output, not newline') parser.add_option("-k", "--keep", default=1, dest='keep', action="store", help='newest N packages to keep - defaults to 1') parser.add_option("-c", "--nocheck", default=0, action="store_true", help='do not check package payload signatures/digests') (opts, args)= parser.parse_args() if opts.new and opts.old: errorprint('\nPass either --old or --new, not both!\n') print parser.format_help() sys.exit(1) if len(args) > 1: errorprint('Error: Only one directory allowed per run.') print parser.format_help() sys.exit(1) if len(args) < 1: errorprint('Error: Must specify a directory to index.') print parser.format_help() sys.exit(1) return (opts, args) def main(args): (options, args) = parseargs(args) mydir = args[0] rpmList = [] rpmList = getFileList(mydir, '.rpm', rpmList) verfile = {} pkgdict = {} # hold all of them - put them in (n,a) = [(e,v,r),(e1,v1,r1)] keepnum = int(options.keep)*(-1) # the number of items to keep if len(rpmList) == 0: errorprint('No files to process') sys.exit(1) ts = rpm.TransactionSet() if options.nocheck: ts.setVSFlags(~(rpm._RPMVSF_NOPAYLOAD)) else: ts.setVSFlags(~(rpm.RPMVSF_NOMD5|rpm.RPMVSF_NEEDPAYLOAD)) for pkg in rpmList: try: hdr = rpmUtils.miscutils.hdrFromPackage(ts, pkg) except rpmUtils.RpmUtilsError, e: msg = "Error opening pkg %s: %s" % (pkg, str(e)) errorprint(msg) continue pkgtuple = rpmUtils.miscutils.pkgTupleFromHeader(hdr) (n,a,e,v,r) = pkgtuple del hdr if (n,a) not in pkgdict: pkgdict[(n,a)] = [] pkgdict[(n,a)].append((e,v,r)) if pkgtuple not in verfile: verfile[pkgtuple] = [] verfile[pkgtuple].append(pkg) for natup in pkgdict.keys(): evrlist = pkgdict[natup] if len(evrlist) > 1: evrlist = misc.unique(evrlist) evrlist.sort(rpmUtils.miscutils.compareEVR) pkgdict[natup] = evrlist del ts # now we have our dicts - we can return whatever by iterating over them outputpackages = [] #if new if not options.old: for (n,a) in pkgdict.keys(): evrlist = pkgdict[(n,a)] if len(evrlist) < abs(keepnum): newevrs = evrlist else: newevrs = evrlist[keepnum:] for (e,v,r) in newevrs: for pkg in verfile[(n,a,e,v,r)]: outputpackages.append(pkg) if options.old: for (n,a) in pkgdict.keys(): evrlist = pkgdict[(n,a)] if len(evrlist) < abs(keepnum): continue oldevrs = evrlist[:keepnum] for (e,v,r) in oldevrs: for pkg in verfile[(n,a,e,v,r)]: outputpackages.append(pkg) outputpackages.sort() for pkg in outputpackages: if options.space: print '%s' % pkg, else: print pkg def usage(): print """ repomanage [--old] [--new] path -o --old - print the older packages -n --new - print the newest packages -s --space - space separated output, not newline -k --keep - newest N packages to keep - defaults to 1 -c --nocheck - do not check package payload signatures/digests -h --help - duh By default it will output the full path to the newest packages in the path. """ if __name__ == "__main__": if len(sys.argv) < 1: usage() sys.exit(1) else: main(sys.argv[1:])
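An example invocation matching the usage text above (the repository path is a placeholder): `repomanage.py --old --keep 2 /srv/repo | xargs rm -f` prints every package except the two newest per (name, arch) and pipes the list to xargs, as the module's usage string suggests.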
gpl-2.0
aslamplr/shorts
lib/oauthlib/oauth1/rfc5849/errors.py
17
2326
# coding=utf-8
"""
oauthlib.oauth1.rfc5849.errors
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Error classes used both by OAuth 1 clients and providers to represent the
spec-defined error responses.
"""
from __future__ import unicode_literals

from oauthlib.common import urlencode, add_params_to_uri


class OAuth1Error(Exception):
    error = None

    def __init__(self, description=None, uri=None, status_code=400,
                 request=None):
        """
        description:    A human-readable ASCII [USASCII] text providing
                        additional information, used to assist the client
                        developer in understanding the error that occurred.
                        Values for the "error_description" parameter MUST NOT
                        include characters outside the set
                        x20-21 / x23-5B / x5D-7E.

        uri:    A URI identifying a human-readable web page with information
                about the error, used to provide the client developer with
                additional information about the error.  Values for the
                "error_uri" parameter MUST conform to the URI-Reference
                syntax, and thus MUST NOT include characters outside the set
                x21 / x23-5B / x5D-7E.

        state:  A CSRF protection value received from the client.

        request: Oauthlib Request object
        """
        self.description = description
        self.uri = uri
        self.status_code = status_code

    def in_uri(self, uri):
        return add_params_to_uri(uri, self.twotuples)

    @property
    def twotuples(self):
        error = [('error', self.error)]
        if self.description:
            error.append(('error_description', self.description))
        if self.uri:
            error.append(('error_uri', self.uri))
        return error

    @property
    def urlencoded(self):
        return urlencode(self.twotuples)


class InsecureTransportError(OAuth1Error):
    error = 'insecure_transport_protocol'
    description = 'Only HTTPS connections are permitted.'


class InvalidSignatureMethodError(OAuth1Error):
    error = 'invalid_signature_method'


class InvalidRequestError(OAuth1Error):
    error = 'invalid_request'


class InvalidClientError(OAuth1Error):
    error = 'invalid_client'
mit
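The error classes in the file above expose three small helpers (twotuples, urlencoded and in_uri) for turning an error into query parameters. A minimal sketch of how they could be exercised follows; it assumes an installed oauthlib package providing this module under oauthlib.oauth1.rfc5849.errors, and the description text is an illustrative placeholder.

from oauthlib.oauth1.rfc5849.errors import InvalidSignatureMethodError

try:
    # Raise one of the concrete error classes defined above.
    raise InvalidSignatureMethodError(description='Only HMAC-SHA1 is accepted.')
except InvalidSignatureMethodError as e:
    print(e.twotuples)    # [('error', 'invalid_signature_method'), ('error_description', ...)]
    print(e.urlencoded)   # the same pairs, urlencoded for a response body
    print(e.in_uri('https://client.example.com/cb'))  # pairs appended to a redirect URI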
3DLIRIOUS/BlendSCAD
examples/example014.scad.py
1
1763
# OpenSCAD example, ported by Michael Mlivoncic # a beautiful dice... # an interesting test case, to get the Boolean operations somehow fixed (TODO) #import sys #sys.path.append("O:/BlenderStuff") import blendscad #import imp #imp.reload(blendscad) #imp.reload(blendscad.core) #imp.reload(blendscad.primitives) blendscad.initns( globals() ) # try to add BlendSCAD names to current namespace .. as if they would be in this file... ## Clear the open .blend file!!! clearAllObjects() ###### End of Header ############################################################################## #OpenSCAD' intersection_for() is only a work around. As standard "for" implies a union of its content, this one is a combination of # for() and intersection() statements. # Not really needed as we currently do not support implicit union()'s, but to demonstrate, how it would be rewritten. # see: http://en.wikibooks.org/wiki/OpenSCAD_User_Manual/The_OpenSCAD_Language#Intersection_For_Loop # intersection_for(i = [ # [0, 0, 0], # [10, 20, 300], # [200, 40, 57], # [20, 88, 57] # ]) # rotate(i) cube([100, 20, 20], center = true) # example 2 - rotation: #intersection_for(i = [ ] tmp = None rnge = [ [ 0, 0, 0], [ 10, 20, 300], [200, 40, 57], [ 20, 88, 57] ] for i in rnge: tmp = intersection( rotate(i , cube([100, 20, 20], center = true)) , tmp); ###### Begin of Footer ############################################################################## color(rands(0,1,3)) # random color last object. to see "FINISH" :-) # print timestamp and finish - sometimes it is easier to see differences in console then :-) import time import datetime st = datetime.datetime.fromtimestamp( time.time() ).strftime('%Y-%m-%d %H:%M:%S') echo ("FINISH", st)
gpl-3.0
MrLoick/python-for-android
python-build/python-libs/gdata/build/lib/gdata/spreadsheet/__init__.py
147
17942
#!/usr/bin/python # # Copyright (C) 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains extensions to Atom objects used with Google Spreadsheets. """ __author__ = '[email protected] (Laura Beth Lincoln)' try: from xml.etree import cElementTree as ElementTree except ImportError: try: import cElementTree as ElementTree except ImportError: try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import gdata import re import string # XML namespaces which are often used in Google Spreadsheets entities. GSPREADSHEETS_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006' GSPREADSHEETS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s' GSPREADSHEETS_EXTENDED_NAMESPACE = ('http://schemas.google.com/spreadsheets' '/2006/extended') GSPREADSHEETS_EXTENDED_TEMPLATE = ('{http://schemas.google.com/spreadsheets' '/2006/extended}%s') class ColCount(atom.AtomBase): """The Google Spreadsheets colCount element """ _tag = 'colCount' _namespace = GSPREADSHEETS_NAMESPACE _children = atom.AtomBase._children.copy() _attributes = atom.AtomBase._attributes.copy() def __init__(self, text=None, extension_elements=None, extension_attributes=None): self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def ColCountFromString(xml_string): return atom.CreateClassFromXMLString(ColCount, xml_string) class RowCount(atom.AtomBase): """The Google Spreadsheets rowCount element """ _tag = 'rowCount' _namespace = GSPREADSHEETS_NAMESPACE _children = atom.AtomBase._children.copy() _attributes = atom.AtomBase._attributes.copy() def __init__(self, text=None, extension_elements=None, extension_attributes=None): self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def RowCountFromString(xml_string): return atom.CreateClassFromXMLString(RowCount, xml_string) class Cell(atom.AtomBase): """The Google Spreadsheets cell element """ _tag = 'cell' _namespace = GSPREADSHEETS_NAMESPACE _children = atom.AtomBase._children.copy() _attributes = atom.AtomBase._attributes.copy() _attributes['row'] = 'row' _attributes['col'] = 'col' _attributes['inputValue'] = 'inputValue' _attributes['numericValue'] = 'numericValue' def __init__(self, text=None, row=None, col=None, inputValue=None, numericValue=None, extension_elements=None, extension_attributes=None): self.text = text self.row = row self.col = col self.inputValue = inputValue self.numericValue = numericValue self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def CellFromString(xml_string): return atom.CreateClassFromXMLString(Cell, xml_string) class Custom(atom.AtomBase): """The Google Spreadsheets custom element""" _namespace = GSPREADSHEETS_EXTENDED_NAMESPACE _children = atom.AtomBase._children.copy() _attributes = atom.AtomBase._attributes.copy() def __init__(self, column=None, text=None, extension_elements=None, 
extension_attributes=None): self.column = column # The name of the column self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def _BecomeChildElement(self, tree): new_child = ElementTree.Element('') tree.append(new_child) new_child.tag = '{%s}%s' % (self.__class__._namespace, self.column) self._AddMembersToElementTree(new_child) def _ToElementTree(self): new_tree = ElementTree.Element('{%s}%s' % (self.__class__._namespace, self.column)) self._AddMembersToElementTree(new_tree) return new_tree def _HarvestElementTree(self, tree): namespace_uri, local_tag = string.split(tree.tag[1:], "}", 1) self.column = local_tag # Fill in the instance members from the contents of the XML tree. for child in tree: self._ConvertElementTreeToMember(child) for attribute, value in tree.attrib.iteritems(): self._ConvertElementAttributeToMember(attribute, value) self.text = tree.text def CustomFromString(xml_string): element_tree = ElementTree.fromstring(xml_string) return _CustomFromElementTree(element_tree) def _CustomFromElementTree(element_tree): namespace_uri, local_tag = string.split(element_tree.tag[1:], "}", 1) if namespace_uri == GSPREADSHEETS_EXTENDED_NAMESPACE: new_custom = Custom() new_custom._HarvestElementTree(element_tree) new_custom.column = local_tag return new_custom return None class SpreadsheetsSpreadsheet(gdata.GDataEntry): """A Google Spreadsheets flavor of a Spreadsheet Atom Entry """ _tag = 'entry' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataEntry._children.copy() _attributes = gdata.GDataEntry._attributes.copy() def __init__(self, author=None, category=None, content=None, contributor=None, atom_id=None, link=None, published=None, rights=None, source=None, summary=None, title=None, control=None, updated=None, text=None, extension_elements=None, extension_attributes=None): self.author = author or [] self.category = category or [] self.content = content self.contributor = contributor or [] self.id = atom_id self.link = link or [] self.published = published self.rights = rights self.source = source self.summary = summary self.control = control self.title = title self.updated = updated self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def SpreadsheetsSpreadsheetFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsSpreadsheet, xml_string) class SpreadsheetsWorksheet(gdata.GDataEntry): """A Google Spreadsheets flavor of a Worksheet Atom Entry """ _tag = 'entry' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataEntry._children.copy() _attributes = gdata.GDataEntry._attributes.copy() _children['{%s}rowCount' % GSPREADSHEETS_NAMESPACE] = ('row_count', RowCount) _children['{%s}colCount' % GSPREADSHEETS_NAMESPACE] = ('col_count', ColCount) def __init__(self, author=None, category=None, content=None, contributor=None, atom_id=None, link=None, published=None, rights=None, source=None, summary=None, title=None, control=None, updated=None, row_count=None, col_count=None, text=None, extension_elements=None, extension_attributes=None): self.author = author or [] self.category = category or [] self.content = content self.contributor = contributor or [] self.id = atom_id self.link = link or [] self.published = published self.rights = rights self.source = source self.summary = summary self.control = control self.title = title self.updated = updated self.row_count = row_count self.col_count = col_count self.text = text 
self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def SpreadsheetsWorksheetFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsWorksheet, xml_string) class SpreadsheetsCell(gdata.BatchEntry): """A Google Spreadsheets flavor of a Cell Atom Entry """ _tag = 'entry' _namespace = atom.ATOM_NAMESPACE _children = gdata.BatchEntry._children.copy() _attributes = gdata.BatchEntry._attributes.copy() _children['{%s}cell' % GSPREADSHEETS_NAMESPACE] = ('cell', Cell) def __init__(self, author=None, category=None, content=None, contributor=None, atom_id=None, link=None, published=None, rights=None, source=None, summary=None, title=None, control=None, updated=None, cell=None, batch_operation=None, batch_id=None, batch_status=None, text=None, extension_elements=None, extension_attributes=None): self.author = author or [] self.category = category or [] self.content = content self.contributor = contributor or [] self.id = atom_id self.link = link or [] self.published = published self.rights = rights self.source = source self.summary = summary self.control = control self.title = title self.batch_operation = batch_operation self.batch_id = batch_id self.batch_status = batch_status self.updated = updated self.cell = cell self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} def SpreadsheetsCellFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsCell, xml_string) class SpreadsheetsList(gdata.GDataEntry): """A Google Spreadsheets flavor of a List Atom Entry """ _tag = 'entry' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataEntry._children.copy() _attributes = gdata.GDataEntry._attributes.copy() def __init__(self, author=None, category=None, content=None, contributor=None, atom_id=None, link=None, published=None, rights=None, source=None, summary=None, title=None, control=None, updated=None, custom=None, text=None, extension_elements=None, extension_attributes=None): self.author = author or [] self.category = category or [] self.content = content self.contributor = contributor or [] self.id = atom_id self.link = link or [] self.published = published self.rights = rights self.source = source self.summary = summary self.control = control self.title = title self.updated = updated self.custom = custom or {} self.text = text self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} # We need to overwrite _ConvertElementTreeToMember to add special logic to # convert custom attributes to members def _ConvertElementTreeToMember(self, child_tree): # Find the element's tag in this class's list of child members if self.__class__._children.has_key(child_tree.tag): member_name = self.__class__._children[child_tree.tag][0] member_class = self.__class__._children[child_tree.tag][1] # If the class member is supposed to contain a list, make sure the # matching member is set to a list, then append the new member # instance to the list. if isinstance(member_class, list): if getattr(self, member_name) is None: setattr(self, member_name, []) getattr(self, member_name).append(atom._CreateClassFromElementTree( member_class[0], child_tree)) else: setattr(self, member_name, atom._CreateClassFromElementTree(member_class, child_tree)) elif child_tree.tag.find('{%s}' % GSPREADSHEETS_EXTENDED_NAMESPACE) == 0: # If this is in the custom namespace, make add it to the custom dict. 
name = child_tree.tag[child_tree.tag.index('}')+1:] custom = _CustomFromElementTree(child_tree) if custom: self.custom[name] = custom else: ExtensionContainer._ConvertElementTreeToMember(self, child_tree) # We need to overwtite _AddMembersToElementTree to add special logic to # convert custom members to XML nodes. def _AddMembersToElementTree(self, tree): # Convert the members of this class which are XML child nodes. # This uses the class's _children dictionary to find the members which # should become XML child nodes. member_node_names = [values[0] for tag, values in self.__class__._children.iteritems()] for member_name in member_node_names: member = getattr(self, member_name) if member is None: pass elif isinstance(member, list): for instance in member: instance._BecomeChildElement(tree) else: member._BecomeChildElement(tree) # Convert the members of this class which are XML attributes. for xml_attribute, member_name in self.__class__._attributes.iteritems(): member = getattr(self, member_name) if member is not None: tree.attrib[xml_attribute] = member # Convert all special custom item attributes to nodes for name, custom in self.custom.iteritems(): custom._BecomeChildElement(tree) # Lastly, call the ExtensionContainers's _AddMembersToElementTree to # convert any extension attributes. atom.ExtensionContainer._AddMembersToElementTree(self, tree) def SpreadsheetsListFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsList, xml_string) element_tree = ElementTree.fromstring(xml_string) return _SpreadsheetsListFromElementTree(element_tree) class SpreadsheetsSpreadsheetsFeed(gdata.GDataFeed): """A feed containing Google Spreadsheets Spreadsheets""" _tag = 'feed' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataFeed._children.copy() _attributes = gdata.GDataFeed._attributes.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SpreadsheetsSpreadsheet]) def SpreadsheetsSpreadsheetsFeedFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsSpreadsheetsFeed, xml_string) class SpreadsheetsWorksheetsFeed(gdata.GDataFeed): """A feed containing Google Spreadsheets Spreadsheets""" _tag = 'feed' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataFeed._children.copy() _attributes = gdata.GDataFeed._attributes.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SpreadsheetsWorksheet]) def SpreadsheetsWorksheetsFeedFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsWorksheetsFeed, xml_string) class SpreadsheetsCellsFeed(gdata.BatchFeed): """A feed containing Google Spreadsheets Cells""" _tag = 'feed' _namespace = atom.ATOM_NAMESPACE _children = gdata.BatchFeed._children.copy() _attributes = gdata.BatchFeed._attributes.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SpreadsheetsCell]) _children['{%s}rowCount' % GSPREADSHEETS_NAMESPACE] = ('row_count', RowCount) _children['{%s}colCount' % GSPREADSHEETS_NAMESPACE] = ('col_count', ColCount) def __init__(self, author=None, category=None, contributor=None, generator=None, icon=None, atom_id=None, link=None, logo=None, rights=None, subtitle=None, title=None, updated=None, entry=None, total_results=None, start_index=None, items_per_page=None, extension_elements=None, extension_attributes=None, text=None, row_count=None, col_count=None, interrupted=None): gdata.BatchFeed.__init__(self, author=author, category=category, contributor=contributor, generator=generator, icon=icon, atom_id=atom_id, link=link, logo=logo, rights=rights, subtitle=subtitle, 
title=title, updated=updated, entry=entry, total_results=total_results, start_index=start_index, items_per_page=items_per_page, extension_elements=extension_elements, extension_attributes=extension_attributes, text=text, interrupted=interrupted) self.row_count = row_count self.col_count = col_count def GetBatchLink(self): for link in self.link: if link.rel == 'http://schemas.google.com/g/2005#batch': return link return None def SpreadsheetsCellsFeedFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsCellsFeed, xml_string) class SpreadsheetsListFeed(gdata.GDataFeed): """A feed containing Google Spreadsheets Spreadsheets""" _tag = 'feed' _namespace = atom.ATOM_NAMESPACE _children = gdata.GDataFeed._children.copy() _attributes = gdata.GDataFeed._attributes.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SpreadsheetsList]) def SpreadsheetsListFeedFromString(xml_string): return atom.CreateClassFromXMLString(SpreadsheetsListFeed, xml_string)
apache-2.0
codekaki/odoo
addons/project_issue/project_issue.py
13
31772
#-*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.addons.base_status.base_stage import base_stage from openerp.addons.project.project import _TASK_STATE from openerp.addons.crm import crm from datetime import datetime from openerp.osv import fields, osv, orm from openerp.tools.translate import _ import binascii import time from openerp import tools from openerp.tools import html2plaintext class project_issue_version(osv.osv): _name = "project.issue.version" _order = "name desc" _columns = { 'name': fields.char('Version Number', size=32, required=True), 'active': fields.boolean('Active', required=False), } _defaults = { 'active': 1, } project_issue_version() class project_issue(base_stage, osv.osv): _name = "project.issue" _description = "Project Issue" _order = "priority, create_date desc" _inherit = ['mail.thread', 'ir.needaction_mixin'] _track = { 'state': { 'project_issue.mt_issue_new': lambda self, cr, uid, obj, ctx=None: obj['state'] in ['new', 'draft'], 'project_issue.mt_issue_closed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done', 'project_issue.mt_issue_started': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'open', }, 'stage_id': { 'project_issue.mt_issue_stage': lambda self, cr, uid, obj, ctx=None: obj['state'] not in ['new', 'draft', 'done', 'open'], }, 'kanban_state': { 'project_issue.mt_issue_blocked': lambda self, cr, uid, obj, ctx=None: obj['kanban_state'] == 'blocked', }, } def create(self, cr, uid, vals, context=None): if context is None: context = {} if vals.get('project_id') and not context.get('default_project_id'): context['default_project_id'] = vals.get('project_id') # context: no_log, because subtype already handle this create_context = dict(context, mail_create_nolog=True) return super(project_issue, self).create(cr, uid, vals, context=create_context) def _get_default_partner(self, cr, uid, context=None): """ Override of base_stage to add project specific behavior """ project_id = self._get_default_project_id(cr, uid, context) if project_id: project = self.pool.get('project.project').browse(cr, uid, project_id, context=context) if project and project.partner_id: return project.partner_id.id return super(project_issue, self)._get_default_partner(cr, uid, context=context) def _get_default_project_id(self, cr, uid, context=None): """ Gives default project by checking if present in the context """ return self._resolve_project_id_from_context(cr, uid, context=context) def _get_default_stage_id(self, cr, uid, context=None): """ Gives default stage_id """ project_id = self._get_default_project_id(cr, uid, context=context) return self.stage_find(cr, uid, [], 
project_id, [('state', '=', 'draft')], context=context) def _resolve_project_id_from_context(self, cr, uid, context=None): """ Returns ID of project based on the value of 'default_project_id' context key, or None if it cannot be resolved to a single project. """ if context is None: context = {} if type(context.get('default_project_id')) in (int, long): return context.get('default_project_id') if isinstance(context.get('default_project_id'), basestring): project_name = context['default_project_id'] project_ids = self.pool.get('project.project').name_search(cr, uid, name=project_name, context=context) if len(project_ids) == 1: return int(project_ids[0][0]) return None def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None): access_rights_uid = access_rights_uid or uid stage_obj = self.pool.get('project.task.type') order = stage_obj._order # lame hack to allow reverting search, should just work in the trivial case if read_group_order == 'stage_id desc': order = "%s desc" % order # retrieve section_id from the context and write the domain # - ('id', 'in', 'ids'): add columns that should be present # - OR ('case_default', '=', True), ('fold', '=', False): add default columns that are not folded # - OR ('project_ids', 'in', project_id), ('fold', '=', False) if project_id: add project columns that are not folded search_domain = [] project_id = self._resolve_project_id_from_context(cr, uid, context=context) if project_id: search_domain += ['|', ('project_ids', '=', project_id)] search_domain += [('id', 'in', ids)] # perform search stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context) result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context) # restore order of the search result.sort(lambda x,y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0]))) fold = {} for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context): fold[stage.id] = stage.fold or False return result, fold def _compute_day(self, cr, uid, ids, fields, args, context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Openday’s IDs @return: difference between current date and log date @param context: A standard dictionary for contextual values """ cal_obj = self.pool.get('resource.calendar') res_obj = self.pool.get('resource.resource') res = {} for issue in self.browse(cr, uid, ids, context=context): # if the working hours on the project are not defined, use default ones (8 -> 12 and 13 -> 17 * 5), represented by None if not issue.project_id or not issue.project_id.resource_calendar_id: working_hours = None else: working_hours = issue.project_id.resource_calendar_id.id res[issue.id] = {} for field in fields: duration = 0 ans = False hours = 0 date_create = datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S") if field in ['working_hours_open','day_open']: if issue.date_open: date_open = datetime.strptime(issue.date_open, "%Y-%m-%d %H:%M:%S") ans = date_open - date_create date_until = issue.date_open #Calculating no. 
of working hours to open the issue hours = cal_obj._interval_hours_get(cr, uid, working_hours, date_create, date_open, timezone_from_uid=issue.user_id.id or uid, exclude_leaves=False, context=context) elif field in ['working_hours_close','day_close']: if issue.date_closed: date_close = datetime.strptime(issue.date_closed, "%Y-%m-%d %H:%M:%S") date_until = issue.date_closed ans = date_close - date_create #Calculating no. of working hours to close the issue hours = cal_obj._interval_hours_get(cr, uid, working_hours, date_create, date_close, timezone_from_uid=issue.user_id.id or uid, exclude_leaves=False, context=context) elif field in ['days_since_creation']: if issue.create_date: days_since_creation = datetime.today() - datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S") res[issue.id][field] = days_since_creation.days continue elif field in ['inactivity_days']: res[issue.id][field] = 0 if issue.date_action_last: inactive_days = datetime.today() - datetime.strptime(issue.date_action_last, '%Y-%m-%d %H:%M:%S') res[issue.id][field] = inactive_days.days continue if ans: resource_id = False if issue.user_id: resource_ids = res_obj.search(cr, uid, [('user_id','=',issue.user_id.id)]) if resource_ids and len(resource_ids): resource_id = resource_ids[0] duration = float(ans.days) + float(ans.seconds)/(24*3600) if field in ['working_hours_open','working_hours_close']: res[issue.id][field] = hours elif field in ['day_open','day_close']: res[issue.id][field] = duration return res def _hours_get(self, cr, uid, ids, field_names, args, context=None): task_pool = self.pool.get('project.task') res = {} for issue in self.browse(cr, uid, ids, context=context): progress = 0.0 if issue.task_id: progress = task_pool._hours_get(cr, uid, [issue.task_id.id], field_names, args, context=context)[issue.task_id.id]['progress'] res[issue.id] = {'progress' : progress} return res def on_change_project(self, cr, uid, ids, project_id, context=None): if project_id: project = self.pool.get('project.project').browse(cr, uid, project_id, context=context) if project and project.partner_id: return {'value': {'partner_id': project.partner_id.id}} return {} def _get_issue_task(self, cr, uid, ids, context=None): issues = [] issue_pool = self.pool.get('project.issue') for task in self.pool.get('project.task').browse(cr, uid, ids, context=context): issues += issue_pool.search(cr, uid, [('task_id','=',task.id)]) return issues def _get_issue_work(self, cr, uid, ids, context=None): issues = [] issue_pool = self.pool.get('project.issue') for work in self.pool.get('project.task.work').browse(cr, uid, ids, context=context): if work.task_id: issues += issue_pool.search(cr, uid, [('task_id','=',work.task_id.id)]) return issues _columns = { 'id': fields.integer('ID', readonly=True), 'name': fields.char('Issue', size=128, required=True), 'active': fields.boolean('Active', required=False), 'create_date': fields.datetime('Creation Date', readonly=True,select=True), 'write_date': fields.datetime('Update Date', readonly=True), 'days_since_creation': fields.function(_compute_day, string='Days since creation date', \ multi='compute_day', type="integer", help="Difference in days between creation date and current date"), 'date_deadline': fields.date('Deadline'), 'section_id': fields.many2one('crm.case.section', 'Sales Team', \ select=True, help='Sales team to which Case belongs to.\ Define Responsible user and Email account for mail gateway.'), 'partner_id': fields.many2one('res.partner', 'Contact', select=1), 'company_id': 
fields.many2one('res.company', 'Company'), 'description': fields.text('Private Note'), 'state': fields.related('stage_id', 'state', type="selection", store=True, selection=_TASK_STATE, string="Status", readonly=True, select=True, help='The status is set to \'Draft\', when a case is created.\ If the case is in progress the status is set to \'Open\'.\ When the case is over, the status is set to \'Done\'.\ If the case needs to be reviewed then the status is \ set to \'Pending\'.'), 'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State', track_visibility='onchange', help="A Issue's kanban state indicates special situations affecting it:\n" " * Normal is the default situation\n" " * Blocked indicates something is preventing the progress of this issue\n" " * Ready for next stage indicates the issue is ready to be pulled to the next stage", readonly=True, required=False), 'email_from': fields.char('Email', size=128, help="These people will receive email.", select=1), 'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"), 'date_open': fields.datetime('Opened', readonly=True,select=True), # Project Issue fields 'date_closed': fields.datetime('Closed', readonly=True,select=True), 'date': fields.datetime('Date'), 'channel_id': fields.many2one('crm.case.channel', 'Channel', help="Communication channel."), 'categ_ids': fields.many2many('project.category', string='Tags'), 'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority', select=True), 'version_id': fields.many2one('project.issue.version', 'Version'), 'stage_id': fields.many2one ('project.task.type', 'Stage', track_visibility='onchange', select=True, domain="['&', ('fold', '=', False), ('project_ids', '=', project_id)]"), 'project_id': fields.many2one('project.project', 'Project', track_visibility='onchange', select=True), 'duration': fields.float('Duration'), 'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]"), 'day_open': fields.function(_compute_day, string='Days to Open', \ multi='compute_day', type="float", store=True), 'day_close': fields.function(_compute_day, string='Days to Close', \ multi='compute_day', type="float", store=True), 'user_id': fields.many2one('res.users', 'Assigned to', required=False, select=1, track_visibility='onchange'), 'working_hours_open': fields.function(_compute_day, string='Working Hours to Open the Issue', \ multi='compute_day', type="float", store=True), 'working_hours_close': fields.function(_compute_day, string='Working Hours to Close the Issue', \ multi='compute_day', type="float", store=True), 'inactivity_days': fields.function(_compute_day, string='Days since last action', \ multi='compute_day', type="integer", help="Difference in days between last action and current date"), 'color': fields.integer('Color Index'), 'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True), 'date_action_last': fields.datetime('Last Action', readonly=1), 'date_action_next': fields.datetime('Next Action', readonly=1), 'progress': fields.function(_hours_get, string='Progress (%)', multi='hours', group_operator="avg", help="Computed as: Time Spent / Total Time.", store = { 'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['task_id'], 10), 'project.task': (_get_issue_task, ['work_ids', 
'remaining_hours', 'planned_hours', 'state', 'stage_id'], 10), 'project.task.work': (_get_issue_work, ['hours'], 10), }), } _defaults = { 'active': 1, 'partner_id': lambda s, cr, uid, c: s._get_default_partner(cr, uid, c), 'email_from': lambda s, cr, uid, c: s._get_default_email(cr, uid, c), 'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c), 'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c), 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'crm.helpdesk', context=c), 'priority': crm.AVAILABLE_PRIORITIES[2][0], 'kanban_state': 'normal', 'user_id': lambda obj, cr, uid, context: uid, } _group_by_full = { 'stage_id': _read_group_stage_ids } def set_priority(self, cr, uid, ids, priority, *args): """Set lead priority """ return self.write(cr, uid, ids, {'priority' : priority}) def set_high_priority(self, cr, uid, ids, *args): """Set lead priority to high """ return self.set_priority(cr, uid, ids, '1') def set_normal_priority(self, cr, uid, ids, *args): """Set lead priority to normal """ return self.set_priority(cr, uid, ids, '3') def convert_issue_task(self, cr, uid, ids, context=None): if context is None: context = {} case_obj = self.pool.get('project.issue') data_obj = self.pool.get('ir.model.data') task_obj = self.pool.get('project.task') result = data_obj._get_id(cr, uid, 'project', 'view_task_search_form') res = data_obj.read(cr, uid, result, ['res_id']) id2 = data_obj._get_id(cr, uid, 'project', 'view_task_form2') id3 = data_obj._get_id(cr, uid, 'project', 'view_task_tree2') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id for bug in case_obj.browse(cr, uid, ids, context=context): new_task_id = task_obj.create(cr, uid, { 'name': bug.name, 'partner_id': bug.partner_id.id, 'description':bug.description, 'date_deadline': bug.date, 'project_id': bug.project_id.id, # priority must be in ['0','1','2','3','4'], while bug.priority is in ['1','2','3','4','5'] 'priority': str(int(bug.priority) - 1), 'user_id': bug.user_id.id, 'planned_hours': 0.0, }) vals = { 'task_id': new_task_id, 'stage_id': self.stage_find(cr, uid, [bug], bug.project_id.id, [('state', '=', 'pending')], context=context), } message = _("Project issue <b>converted</b> to task.") self.message_post(cr, uid, [bug.id], body=message, context=context) case_obj.write(cr, uid, [bug.id], vals, context=context) return { 'name': _('Tasks'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'project.task', 'res_id': int(new_task_id), 'view_id': False, 'views': [(id2,'form'),(id3,'tree'),(False,'calendar'),(False,'graph')], 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'], 'nodestroy': True } def copy(self, cr, uid, id, default=None, context=None): issue = self.read(cr, uid, id, ['name'], context=context) if not default: default = {} default = default.copy() default.update(name=_('%s (copy)') % (issue['name'])) return super(project_issue, self).copy(cr, uid, id, default=default, context=context) def write(self, cr, uid, ids, vals, context=None): #Update last action date every time the user changes the stage if 'stage_id' in vals: vals['date_action_last'] = time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT) if 'kanban_state' not in vals: vals.update(kanban_state='normal') state = self.pool.get('project.task.type').browse(cr, uid, vals['stage_id'], context=context).state for issue in self.browse(cr, uid, ids, context=context): # Change from draft to 
not draft EXCEPT cancelled: The issue has been opened -> set the opening date if issue.state == 'draft' and state not in ('draft', 'cancelled'): vals['date_open'] = time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT) # Change from not done to done: The issue has been closed -> set the closing date if issue.state != 'done' and state == 'done': vals['date_closed'] = time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT) return super(project_issue, self).write(cr, uid, ids, vals, context) def onchange_task_id(self, cr, uid, ids, task_id, context=None): if not task_id: return {'value': {}} task = self.pool.get('project.task').browse(cr, uid, task_id, context=context) return {'value': {'user_id': task.user_id.id, }} def case_reset(self, cr, uid, ids, context=None): """Resets case as draft """ res = super(project_issue, self).case_reset(cr, uid, ids, context) self.write(cr, uid, ids, {'date_open': False, 'date_closed': False}) return res # ------------------------------------------------------- # Stage management # ------------------------------------------------------- def set_kanban_state_blocked(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'kanban_state': 'blocked'}, context=context) def set_kanban_state_normal(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'kanban_state': 'normal'}, context=context) def set_kanban_state_done(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'kanban_state': 'done'}, context=context) def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None): """ Override of the base.stage method Parameter of the stage search taken from the issue: - type: stage type must be the same or 'both' - section_id: if set, stages must belong to this section or be a default case """ if isinstance(cases, (int, long)): cases = self.browse(cr, uid, cases, context=context) # collect all section_ids section_ids = [] if section_id: section_ids.append(section_id) for task in cases: if task.project_id: section_ids.append(task.project_id.id) # OR all section_ids and OR with case_default search_domain = [] if section_ids: search_domain += [('|')] * (len(section_ids)-1) for section_id in section_ids: search_domain.append(('project_ids', '=', section_id)) search_domain += list(domain) # perform search, return the first found stage_ids = self.pool.get('project.task.type').search(cr, uid, search_domain, order=order, context=context) if stage_ids: return stage_ids[0] return False def case_cancel(self, cr, uid, ids, context=None): """ Cancels case """ self.case_set(cr, uid, ids, 'cancelled', {'active': True}, context=context) return True def case_escalate(self, cr, uid, ids, context=None): cases = self.browse(cr, uid, ids) for case in cases: data = {} if case.project_id.project_escalation_id: data['project_id'] = case.project_id.project_escalation_id.id if case.project_id.project_escalation_id.user_id: data['user_id'] = case.project_id.project_escalation_id.user_id.id if case.task_id: self.pool.get('project.task').write(cr, uid, [case.task_id.id], {'project_id': data['project_id'], 'user_id': False}) else: raise osv.except_osv(_('Warning!'), _('You cannot escalate this issue.\nThe relevant Project has not configured the Escalation Project!')) self.case_set(cr, uid, ids, 'draft', data, context=context) return True # ------------------------------------------------------- # Mail gateway # ------------------------------------------------------- def message_get_reply_to(self, cr, uid, ids, context=None): """ 
Override to get the reply_to of the parent project. """ return [issue.project_id.message_get_reply_to()[0] if issue.project_id else False for issue in self.browse(cr, uid, ids, context=context)] def message_get_suggested_recipients(self, cr, uid, ids, context=None): recipients = super(project_issue, self).message_get_suggested_recipients(cr, uid, ids, context=context) try: for issue in self.browse(cr, uid, ids, context=context): if issue.partner_id: self._message_add_suggested_recipient(cr, uid, recipients, issue, partner=issue.partner_id, reason=_('Customer')) elif issue.email_from: self._message_add_suggested_recipient(cr, uid, recipients, issue, email=issue.email_from, reason=_('Customer Email')) except (osv.except_osv, orm.except_orm): # no read access rights -> just ignore suggested recipients because this imply modifying followers pass return recipients def message_new(self, cr, uid, msg, custom_values=None, context=None): """ Overrides mail_thread message_new that is called by the mailgateway through message_process. This override updates the document according to the email. """ if custom_values is None: custom_values = {} if context is None: context = {} context['state_to'] = 'draft' desc = html2plaintext(msg.get('body')) if msg.get('body') else '' defaults = { 'name': msg.get('subject') or _("No Subject"), 'description': desc, 'email_from': msg.get('from'), 'email_cc': msg.get('cc'), 'partner_id': msg.get('author_id', False), 'user_id': False, } defaults.update(custom_values) res_id = super(project_issue, self).message_new(cr, uid, msg, custom_values=defaults, context=context) return res_id def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification', subtype=None, parent_id=False, attachments=None, context=None, content_subtype='html', **kwargs): """ Overrides mail_thread message_post so that we can set the date of last action field when a new message is posted on the issue. """ if context is None: context = {} res = super(project_issue, self).message_post(cr, uid, thread_id, body=body, subject=subject, type=type, subtype=subtype, parent_id=parent_id, attachments=attachments, context=context, content_subtype=content_subtype, **kwargs) if thread_id: self.write(cr, uid, thread_id, {'date_action_last': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)}, context=context) return res class project(osv.osv): _inherit = "project.project" def _get_alias_models(self, cr, uid, context=None): return [('project.task', "Tasks"), ("project.issue", "Issues")] def _issue_count(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, 0) issue_ids = self.pool.get('project.issue').search(cr, uid, [('project_id', 'in', ids)]) for issue in self.pool.get('project.issue').browse(cr, uid, issue_ids, context): if issue.state not in ('done', 'cancelled'): res[issue.project_id.id] += 1 return res _columns = { 'project_escalation_id' : fields.many2one('project.project','Project Escalation', help='If any issue is escalated from the current Project, it will be listed under the project selected here.', states={'close':[('readonly',True)], 'cancelled':[('readonly',True)]}), 'issue_count': fields.function(_issue_count, type='integer', string="Unclosed Issues"), } def _check_escalation(self, cr, uid, ids, context=None): project_obj = self.browse(cr, uid, ids[0], context=context) if project_obj.project_escalation_id: if project_obj.project_escalation_id.id == project_obj.id: return False return True _constraints = [ (_check_escalation, 'Error! 
You cannot assign escalation to the same project!', ['project_escalation_id']) ] project() class account_analytic_account(osv.osv): _inherit = 'account.analytic.account' _description = 'Analytic Account' _columns = { 'use_issues' : fields.boolean('Issues', help="Check this field if this project manages issues"), } def on_change_template(self, cr, uid, ids, template_id, context=None): res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, context=context) if template_id and 'value' in res: template = self.browse(cr, uid, template_id, context=context) res['value']['use_issues'] = template.use_issues return res def _trigger_project_creation(self, cr, uid, vals, context=None): if context is None: context = {} res = super(account_analytic_account, self)._trigger_project_creation(cr, uid, vals, context=context) return res or (vals.get('use_issues') and not 'project_creation_in_progress' in context) account_analytic_account() class project_project(osv.osv): _inherit = 'project.project' _defaults = { 'use_issues': True } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Fusion-Rom/android_external_chromium_org
tools/export_tarball/export_v8_tarball.py
118
3960
#!/usr/bin/env python # Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Creates a tarball with V8 sources, but without .svn directories. This allows easy packaging of V8, synchronized with browser releases. Example usage: export_v8_tarball.py /foo/bar The above will create file /foo/bar/v8-VERSION.tar.bz2 if it doesn't exist. """ import optparse import os import re import subprocess import sys import tarfile _V8_MAJOR_VERSION_PATTERN = re.compile(r'#define\s+MAJOR_VERSION\s+(.*)') _V8_MINOR_VERSION_PATTERN = re.compile(r'#define\s+MINOR_VERSION\s+(.*)') _V8_BUILD_NUMBER_PATTERN = re.compile(r'#define\s+BUILD_NUMBER\s+(.*)') _V8_PATCH_LEVEL_PATTERN = re.compile(r'#define\s+PATCH_LEVEL\s+(.*)') _V8_PATTERNS = [ _V8_MAJOR_VERSION_PATTERN, _V8_MINOR_VERSION_PATTERN, _V8_BUILD_NUMBER_PATTERN, _V8_PATCH_LEVEL_PATTERN] _NONESSENTIAL_DIRS = ( 'third_party/icu', ) def GetV8Version(v8_directory): """ Returns version number as string based on the string contents of version.cc file. """ with open(os.path.join(v8_directory, 'src', 'version.cc')) as version_file: version_contents = version_file.read() version_components = [] for pattern in _V8_PATTERNS: version_components.append(pattern.search(version_contents).group(1).strip()) if version_components[len(version_components) - 1] == '0': version_components.pop() return '.'.join(version_components) def GetSourceDirectory(): return os.path.realpath( os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src')) def GetV8Directory(): return os.path.join(GetSourceDirectory(), 'v8') # Workaround lack of the exclude parameter in add method in python-2.4. # TODO(phajdan.jr): remove the workaround when it's not needed on the bot. class MyTarFile(tarfile.TarFile): def set_remove_nonessential_files(self, remove): self.__remove_nonessential_files = remove def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): head, tail = os.path.split(name) if tail in ('.svn', '.git'): return if self.__remove_nonessential_files: # Remove contents of non-essential directories, but preserve gyp files, # so that build/gyp_chromium can work. for nonessential_dir in _NONESSENTIAL_DIRS: dir_path = os.path.join(GetV8Directory(), nonessential_dir) if (name.startswith(dir_path) and os.path.isfile(name) and 'gyp' not in name): return tarfile.TarFile.add(self, name, arcname=arcname, recursive=recursive) def main(argv): parser = optparse.OptionParser() options, args = parser.parse_args(argv) if len(args) != 1: print 'You must provide only one argument: output file directory' return 1 v8_directory = GetV8Directory() if not os.path.exists(v8_directory): print 'Cannot find the v8 directory.' return 1 v8_version = GetV8Version(v8_directory) print 'Packaging V8 version %s...' % v8_version subprocess.check_call(["make", "dependencies"], cwd=v8_directory) output_basename = 'v8-%s' % v8_version # Package full tarball. output_fullname = os.path.join(args[0], output_basename + '.tar.bz2') if not os.path.exists(output_fullname): archive = MyTarFile.open(output_fullname, 'w:bz2') archive.set_remove_nonessential_files(False) try: archive.add(v8_directory, arcname=output_basename) finally: archive.close() # Package lite tarball. 
output_fullname = os.path.join(args[0], output_basename + '-lite.tar.bz2') if not os.path.exists(output_fullname): archive = MyTarFile.open(output_fullname, 'w:bz2') archive.set_remove_nonessential_files(True) try: archive.add(v8_directory, arcname=output_basename) finally: archive.close() return 0 if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
bsd-3-clause
XiaosongWei/chromium-crosswalk
components/test/data/autofill/merge/tools/serialize_profiles.py
137
2606
#!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os.path import sqlite3 import sys from autofill_merge_common import SerializeProfiles, ColumnNameToFieldType def main(): """Serializes the autofill_profiles table from the specified database.""" if len(sys.argv) != 2: print "Usage: python serialize_profiles.py <path/to/database>" return 1 database = sys.argv[1] if not os.path.isfile(database): print "Cannot read database at \"%s\"" % database return 1 # Read the autofill_profile_names table. try: connection = sqlite3.connect(database, 0) cursor = connection.cursor() cursor.execute("SELECT * from autofill_profile_names;") except sqlite3.OperationalError: print ("Failed to read the autofill_profile_names table from \"%s\"" % database) raise # For backward-compatibility, the result of |cursor.description| is a list of # 7-tuples, in which the first item is the column name, and the remaining # items are 'None'. types = [ColumnNameToFieldType(item[0]) for item in cursor.description] profiles = {} for profile in cursor: guid = profile[0] profiles[guid] = zip(types, profile) # Read the autofill_profile_emails table. try: cursor.execute("SELECT * from autofill_profile_emails;") except sqlite3.OperationalError: print ("Failed to read the autofill_profile_emails table from \"%s\"" % database) raise types = [ColumnNameToFieldType(item[0]) for item in cursor.description] for profile in cursor: guid = profile[0] profiles[guid].extend(zip(types, profile)) # Read the autofill_profiles table. try: cursor.execute("SELECT * from autofill_profiles;") except sqlite3.OperationalError: print "Failed to read the autofill_profiles table from \"%s\"" % database raise types = [ColumnNameToFieldType(item[0]) for item in cursor.description] for profile in cursor: guid = profile[0] profiles[guid].extend(zip(types, profile)) # Read the autofill_profile_phones table. try: cursor.execute("SELECT * from autofill_profile_phones;") except sqlite3.OperationalError: print ("Failed to read the autofill_profile_phones table from \"%s\"" % database) raise for profile in cursor: guid = profile[0] profiles[guid].append(("PHONE_HOME_WHOLE_NUMBER", profile[2])) print SerializeProfiles(profiles.values()) return 0 if __name__ == '__main__': sys.exit(main())
bsd-3-clause
stackunderflow-stackptr/stackptr_web
crossbarconnect/client.py
1
8527
############################################################################### ## ## Copyright (C) 2012-2014 Tavendo GmbH ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## ############################################################################### __all__ = ['Client'] try: import ssl _HAS_SSL = True except ImportError: _HAS_SSL = False import sys _HAS_SSL_CLIENT_CONTEXT = sys.version_info >= (2,7,9) import json import hmac import hashlib import base64 import random from datetime import datetime import six from six.moves.urllib import parse from six.moves.http_client import HTTPConnection, HTTPSConnection def _utcnow(): """ Get current time in UTC as ISO 8601 string. :returns str -- Current time as string in ISO 8601 format. """ now = datetime.utcnow() return now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" def _parse_url(url): """ Parses a Crossbar.io HTTP bridge URL. """ parsed = parse.urlparse(url) if parsed.scheme not in ["http", "https"]: raise Exception("invalid Push URL scheme '%s'" % parsed.scheme) if parsed.port is None or parsed.port == "": if parsed.scheme == "http": port = 80 elif parsed.scheme == "https": port = 443 else: raise Exception("logic error") else: port = int(parsed.port) if parsed.fragment is not None and parsed.fragment != "": raise Exception("invalid Push URL: non-empty fragment '%s" % parsed.fragment) if parsed.query is not None and parsed.query != "": raise Exception("invalid Push URL: non-empty query string '%s" % parsed.query) if parsed.path is not None and parsed.path != "": ppath = parsed.path path = parse.unquote(ppath) else: ppath = "/" path = ppath return {'secure': parsed.scheme == "https", 'host': parsed.hostname, 'port': port, 'path': path} class Client: """ Crossbar.io HTTP bridge client. """ def __init__(self, url, key = None, secret = None, timeout = 5, context = None): """ Create a new Crossbar.io push client. The only mandatory argument is the Push service endpoint of the Crossbar.io instance to push to. For signed pushes, provide authentication key and secret. If those are not given, unsigned pushes are performed. :param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push). :type url: str :param key: Optional key to use for signing requests. :type key: str :param secret: When using signed request, the secret corresponding to key. :type secret: str :param timeout: Timeout for requests. :type timeout: int :param context: If the HTTP bridge is running on HTTPS (that is securely over TLS), then the context provides the SSL settings the client should use (e.g. the certificate chain against which to verify the server certificate). This parameter is only available on Python 2.7.9+ and Python 3 (otherwise the parameter is silently ignored!). 
See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext :type context: obj or None """ if six.PY2: if type(url) == str: url = six.u(url) if type(key) == str: key = six.u(key) if type(secret) == str: secret = six.u(secret) assert(type(url) == six.text_type) assert((key and secret) or (not key and not secret)) assert(key is None or type(key) == six.text_type) assert(secret is None or type(secret) == six.text_type) assert(type(timeout) == int) if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT: assert(context is None or isinstance(context, ssl.SSLContext)) self._seq = 1 self._key = key self._secret = secret self._endpoint = _parse_url(url) self._endpoint['headers'] = { "Content-type": "application/json", "User-agent": "crossbarconnect-python" } if self._endpoint['secure']: if not _HAS_SSL: raise Exception("Bridge URL is using HTTPS, but Python SSL module is missing") if _HAS_SSL_CLIENT_CONTEXT: self._connection = HTTPSConnection(self._endpoint['host'], self._endpoint['port'], timeout = timeout, context = context) else: self._connection = HTTPSConnection(self._endpoint['host'], self._endpoint['port'], timeout = timeout) else: self._connection = HTTPConnection(self._endpoint['host'], self._endpoint['port'], timeout = timeout) def publish(self, topic, *args, **kwargs): """ Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge. The event payload (positional and keyword) can be of any type that can be serialized to JSON. If `kwargs` contains an `options` attribute, this is expected to be a dictionary with the following possible parameters: * `exclude`: A list of WAMP session IDs to exclude from receivers. * `eligible`: A list of WAMP session IDs eligible as receivers. :param topic: Topic to push to. :type topic: str :param args: Arbitrary application payload for the event (positional arguments). :type args: list :param kwargs: Arbitrary application payload for the event (keyword arguments). :type kwargs: dict :returns int -- The event publication ID assigned by the broker. 
""" if six.PY2 and type(topic) == str: topic = six.u(topic) assert(type(topic) == six.text_type) ## this will get filled and later serialized into HTTP/POST body ## event = { 'topic': topic } if 'options' in kwargs: event['options'] = kwargs.pop('options') assert(type(event['options']) == dict) if args: event['args'] = args if kwargs: event['kwargs'] = kwargs try: body = json.dumps(event, separators = (',',':')) if six.PY3: body = body.encode('utf8') except Exception as e: raise Exception("invalid event payload - not JSON serializable: {0}".format(e)) params = { 'timestamp': _utcnow(), 'seq': self._seq, } if self._key: ## if the request is to be signed, create extra fields and signature params['key'] = self._key params['nonce'] = random.randint(0, 9007199254740992) # HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256) hm.update(params['key'].encode('utf8')) hm.update(params['timestamp'].encode('utf8')) hm.update(u"{0}".format(params['seq']).encode('utf8')) hm.update(u"{0}".format(params['nonce']).encode('utf8')) hm.update(body) signature = base64.urlsafe_b64encode(hm.digest()) params['signature'] = signature self._seq += 1 path = "{0}?{1}".format(parse.quote(self._endpoint['path']), parse.urlencode(params)) ## now issue the HTTP/POST ## self._connection.request('POST', path, body, self._endpoint['headers']) response = self._connection.getresponse() response_body = response.read() if response.status not in [200, 202]: raise Exception("publication request failed {0} [{1}] - {2}".format(response.status, response.reason, response_body)) try: res = json.loads(response_body) except Exception as e: raise Exception("publication request bogus result - {0}".format(e)) return res['id']
agpl-3.0
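A hedged usage sketch for the HTTP-bridge client above (not part of the original file): the endpoint URL, API key/secret and topic are placeholders, and the class is assumed to be importable as crossbarconnect.Client.

# Illustrative only: publish an event through the Crossbar.io HTTP bridge
# using the client defined above. URL, key/secret and topic are hypothetical.
import crossbarconnect

client = crossbarconnect.Client(u"http://127.0.0.1:8080/push",
                                key=u"myapp", secret=u"secret")

# Positional/keyword payload plus per-publish options (exclude/eligible):
event_id = client.publish(u"com.example.topic", 23, u"hello",
                          options={'exclude': [1234567]})
print("event published with id {0}".format(event_id))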
scorphus/django
django/contrib/sites/models.py
316
3743
from __future__ import unicode_literals import string from django.core.exceptions import ImproperlyConfigured, ValidationError from django.db import models from django.db.models.signals import pre_delete, pre_save from django.http.request import split_domain_port from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ SITE_CACHE = {} def _simple_domain_name_validator(value): """ Validates that the given value contains no whitespaces to prevent common typos. """ if not value: return checks = ((s in value) for s in string.whitespace) if any(checks): raise ValidationError( _("The domain name cannot contain any spaces or tabs."), code='invalid', ) class SiteManager(models.Manager): use_in_migrations = True def _get_site_by_id(self, site_id): if site_id not in SITE_CACHE: site = self.get(pk=site_id) SITE_CACHE[site_id] = site return SITE_CACHE[site_id] def _get_site_by_request(self, request): host = request.get_host() try: # First attempt to look up the site by host with or without port. if host not in SITE_CACHE: SITE_CACHE[host] = self.get(domain__iexact=host) return SITE_CACHE[host] except Site.DoesNotExist: # Fallback to looking up site after stripping port from the host. domain, port = split_domain_port(host) if not port: raise if domain not in SITE_CACHE: SITE_CACHE[domain] = self.get(domain__iexact=domain) return SITE_CACHE[domain] def get_current(self, request=None): """ Returns the current Site based on the SITE_ID in the project's settings. If SITE_ID isn't defined, it returns the site with domain matching request.get_host(). The ``Site`` object is cached the first time it's retrieved from the database. """ from django.conf import settings if getattr(settings, 'SITE_ID', ''): site_id = settings.SITE_ID return self._get_site_by_id(site_id) elif request: return self._get_site_by_request(request) raise ImproperlyConfigured( "You're using the Django \"sites framework\" without having " "set the SITE_ID setting. Create a site in your database and " "set the SITE_ID setting or pass a request to " "Site.objects.get_current() to fix this error." ) def clear_cache(self): """Clears the ``Site`` object cache.""" global SITE_CACHE SITE_CACHE = {} @python_2_unicode_compatible class Site(models.Model): domain = models.CharField(_('domain name'), max_length=100, validators=[_simple_domain_name_validator], unique=True) name = models.CharField(_('display name'), max_length=50) objects = SiteManager() class Meta: db_table = 'django_site' verbose_name = _('site') verbose_name_plural = _('sites') ordering = ('domain',) def __str__(self): return self.domain def clear_site_cache(sender, **kwargs): """ Clears the cache (if primed) each time a site is saved or deleted """ instance = kwargs['instance'] using = kwargs['using'] try: del SITE_CACHE[instance.pk] except KeyError: pass try: del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain] except (KeyError, Site.DoesNotExist): pass pre_save.connect(clear_site_cache, sender=Site) pre_delete.connect(clear_site_cache, sender=Site)
bsd-3-clause
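A minimal sketch of how this sites framework is typically consumed, assuming a configured Django project with django.contrib.sites in INSTALLED_APPS and SITE_ID set in settings; nothing below comes from the file itself.

# Resolve the current Site via settings.SITE_ID, then clear the module cache.
from django.contrib.sites.models import Site

current = Site.objects.get_current()      # resolved via settings.SITE_ID
print(current.domain, current.name)

# Without SITE_ID, pass the request so the host header is matched instead:
#   current = Site.objects.get_current(request)

Site.objects.clear_cache()                # drop the per-process SITE_CACHE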
lexyan/SickBeard
lib/hachoir_parser/audio/modplug.py
90
10667
""" Modplug metadata inserted into module files. Doc: - http://modplug.svn.sourceforge.net/viewvc/modplug/trunk/modplug/soundlib/ Author: Christophe GISQUET <[email protected]> Creation: 10th February 2007 """ from lib.hachoir_core.field import (FieldSet, UInt32, UInt16, UInt8, Int8, Float32, RawBytes, String, GenericVector, ParserError) from lib.hachoir_core.endian import LITTLE_ENDIAN from lib.hachoir_core.text_handler import textHandler, hexadecimal MAX_ENVPOINTS = 32 def parseComments(parser): size = parser["block_size"].value if size > 0: yield String(parser, "comment", size) class MidiOut(FieldSet): static_size = 9*32*8 def createFields(self): for name in ("start", "stop", "tick", "noteon", "noteoff", "volume", "pan", "banksel", "program"): yield String(self, name, 32, strip='\0') class Command(FieldSet): static_size = 32*8 def createFields(self): start = self.absolute_address size = self.stream.searchBytesLength("\0", False, start) if size > 0: self.info("Command: %s" % self.stream.readBytes(start, size)) yield String(self, "command", size, strip='\0') yield RawBytes(self, "parameter", (self._size//8)-size) class MidiSFXExt(FieldSet): static_size = 16*32*8 def createFields(self): for index in xrange(16): yield Command(self, "command[]") class MidiZXXExt(FieldSet): static_size = 128*32*8 def createFields(self): for index in xrange(128): yield Command(self, "command[]") def parseMidiConfig(parser): yield MidiOut(parser, "midi_out") yield MidiSFXExt(parser, "sfx_ext") yield MidiZXXExt(parser, "zxx_ext") def parseChannelSettings(parser): size = parser["block_size"].value//4 if size > 0: yield GenericVector(parser, "settings", size, UInt32, "mix_plugin") def parseEQBands(parser): size = parser["block_size"].value//4 if size > 0: yield GenericVector(parser, "gains", size, UInt32, "band") class SoundMixPluginInfo(FieldSet): static_size = 128*8 def createFields(self): yield textHandler(UInt32(self, "plugin_id1"), hexadecimal) yield textHandler(UInt32(self, "plugin_id2"), hexadecimal) yield UInt32(self, "input_routing") yield UInt32(self, "output_routing") yield GenericVector(self, "routing_info", 4, UInt32, "reserved") yield String(self, "name", 32, strip='\0') yield String(self, "dll_name", 64, desc="Original DLL name", strip='\0') class ExtraData(FieldSet): def __init__(self, parent, name, desc=None): FieldSet.__init__(self, parent, name, desc) self._size = (4+self["size"].value)*8 def createFields(self): yield UInt32(self, "size") size = self["size"].value if size: yield RawBytes(self, "data", size) class XPlugData(FieldSet): def __init__(self, parent, name, desc=None): FieldSet.__init__(self, parent, name, desc) self._size = (4+self["size"].value)*8 def createFields(self): yield UInt32(self, "size") while not self.eof: yield UInt32(self, "marker") if self["marker"].value == 'DWRT': yield Float32(self, "dry_ratio") elif self["marker"].value == 'PORG': yield UInt32(self, "default_program") def parsePlugin(parser): yield SoundMixPluginInfo(parser, "info") # Check if VST setchunk present size = parser.stream.readBits(parser.absolute_address+parser.current_size, 32, LITTLE_ENDIAN) if 0 < size < parser.current_size + parser._size: yield ExtraData(parser, "extra_data") # Check if XPlugData is present size = parser.stream.readBits(parser.absolute_address+parser.current_size, 32, LITTLE_ENDIAN) if 0 < size < parser.current_size + parser._size: yield XPlugData(parser, "xplug_data") # Format: "XXXX": (type, count, name) EXTENSIONS = { # [email protected] "XTPM": { "..Fd": (UInt32, 1, "Flags"), 
"..OF": (UInt32, 1, "Fade out"), "..VG": (UInt32, 1, "Global Volume"), "...P": (UInt32, 1, "Panning"), "..EV": (UInt32, 1, "Volume Envelope"), "..EP": (UInt32, 1, "Panning Envelope"), ".EiP": (UInt32, 1, "Pitch Envelope"), ".SLV": (UInt8, 1, "Volume Loop Start"), ".ELV": (UInt8, 1, "Volume Loop End"), ".BSV": (UInt8, 1, "Volume Sustain Begin"), ".ESV": (UInt8, 1, "Volume Sustain End"), ".SLP": (UInt8, 1, "Panning Loop Start"), ".ELP": (UInt8, 1, "Panning Loop End"), ".BSP": (UInt8, 1, "Panning Substain Begin"), ".ESP": (UInt8, 1, "Padding Substain End"), "SLiP": (UInt8, 1, "Pitch Loop Start"), "ELiP": (UInt8, 1, "Pitch Loop End"), "BSiP": (UInt8, 1, "Pitch Substain Begin"), "ESiP": (UInt8, 1, "Pitch Substain End"), ".ANN": (UInt8, 1, "NNA"), ".TCD": (UInt8, 1, "DCT"), ".AND": (UInt8, 1, "DNA"), "..SP": (UInt8, 1, "Panning Swing"), "..SV": (UInt8, 1, "Volume Swing"), ".CFI": (UInt8, 1, "IFC"), ".RFI": (UInt8, 1, "IFR"), "..BM": (UInt32, 1, "Midi Bank"), "..PM": (UInt8, 1, "Midi Program"), "..CM": (UInt8, 1, "Midi Channel"), ".KDM": (UInt8, 1, "Midi Drum Key"), ".SPP": (Int8, 1, "PPS"), ".CPP": (UInt8, 1, "PPC"), ".[PV": (UInt32, MAX_ENVPOINTS, "Volume Points"), ".[PP": (UInt32, MAX_ENVPOINTS, "Panning Points"), "[PiP": (UInt32, MAX_ENVPOINTS, "Pitch Points"), ".[EV": (UInt8, MAX_ENVPOINTS, "Volume Enveloppe"), ".[EP": (UInt8, MAX_ENVPOINTS, "Panning Enveloppe"), "[EiP": (UInt8, MAX_ENVPOINTS, "Pitch Enveloppe"), ".[MN": (UInt8, 128, "Note Mapping"), "..[K": (UInt32, 128, "Keyboard"), "..[n": (String, 32, "Name"), ".[nf": (String, 12, "Filename"), ".PiM": (UInt8, 1, "MixPlug"), "..RV": (UInt16, 1, "Volume Ramping"), "...R": (UInt16, 1, "Resampling"), "..SC": (UInt8, 1, "Cut Swing"), "..SR": (UInt8, 1, "Res Swing"), "..MF": (UInt8, 1, "Filter Mode"), }, # See after "CODE tag dictionary", same place, elements with [EXT] "STPM": { "...C": (UInt32, 1, "Channels"), ".VWC": (None, 0, "CreatedWith version"), ".VGD": (None, 0, "Default global volume"), "..TD": (None, 0, "Default tempo"), "HIBE": (None, 0, "Embedded instrument header"), "VWSL": (None, 0, "LastSavedWith version"), ".MMP": (None, 0, "Plugin Mix mode"), ".BPR": (None, 0, "Rows per beat"), ".MPR": (None, 0, "Rows per measure"), "@PES": (None, 0, "Chunk separator"), ".APS": (None, 0, "Song Pre-amplification"), "..MT": (None, 0, "Tempo mode"), "VTSV": (None, 0, "VSTi volume"), } } class MPField(FieldSet): def __init__(self, parent, name, ext, desc=None): FieldSet.__init__(self, parent, name, desc) self.ext = ext self.info(self.createDescription()) self._size = (6+self["data_size"].value)*8 def createFields(self): # Identify tag code = self.stream.readBytes(self.absolute_address, 4) if code in self.ext: cls, count, comment = self.ext[code] else: cls, count, comment = RawBytes, 1, "Unknown tag" # Header yield String(self, "code", 4, comment) yield UInt16(self, "data_size") # Data if not cls: size = self["data_size"].value if size > 0: yield RawBytes(self, "data", size) elif cls in (String, RawBytes): yield cls(self, "value", count) else: if count > 1: yield GenericVector(self, "values", count, cls, "item") else: yield cls(self, "value") def createDescription(self): return "Element '%s', size %i" % \ (self["code"]._description, self["data_size"].value) def parseFields(parser): # Determine field names ext = EXTENSIONS[parser["block_type"].value] if ext == None: raise ParserError("Unknown parent '%s'" % parser["block_type"].value) # Parse fields addr = parser.absolute_address + parser.current_size while not parser.eof and 
parser.stream.readBytes(addr, 4) in ext: field = MPField(parser, "field[]", ext) yield field addr += field._size # Abort on unknown codes parser.info("End of extension '%s' when finding '%s'" % (parser["block_type"].value, parser.stream.readBytes(addr, 4))) class ModplugBlock(FieldSet): BLOCK_INFO = { "TEXT": ("comment", True, "Comment", parseComments), "MIDI": ("midi_config", True, "Midi configuration", parseMidiConfig), "XFHC": ("channel_settings", True, "Channel settings", parseChannelSettings), "XTPM": ("instrument_ext", False, "Instrument extensions", parseFields), "STPM": ("song_ext", False, "Song extensions", parseFields), } def __init__(self, parent, name, desc=None): FieldSet.__init__(self, parent, name, desc) self.parseBlock = parsePlugin t = self["block_type"].value self.has_size = False if t in self.BLOCK_INFO: self._name, self.has_size, desc, parseBlock = self.BLOCK_INFO[t] if callable(desc): self.createDescription = lambda: desc(self) if parseBlock: self.parseBlock = lambda: parseBlock(self) if self.has_size: self._size = 8*(self["block_size"].value + 8) def createFields(self): yield String(self, "block_type", 4) if self.has_size: yield UInt32(self, "block_size") if self.parseBlock: for field in self.parseBlock(): yield field if self.has_size: size = self["block_size"].value - (self.current_size//8) if size > 0: yield RawBytes(self, "data", size, "Unknown data") def ParseModplugMetadata(parser): while not parser.eof: block = ModplugBlock(parser, "block[]") yield block if block["block_type"].value == "STPM": break # More undocumented stuff: date ? size = (parser._size - parser.absolute_address - parser.current_size)//8 if size > 0: yield RawBytes(parser, "info", size)
gpl-3.0
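For illustration only, a tiny sketch of the tag lookup that MPField performs against the EXTENSIONS table; the import path mirrors the repository layout shown above and is an assumption.

# Resolve a 4-byte tag code to its (field class, count, description) triple,
# falling back to RawBytes for unknown tags, exactly as MPField does.
from lib.hachoir_parser.audio.modplug import EXTENSIONS
from lib.hachoir_core.field import RawBytes

code = "..VG"                                        # "Global Volume" in "XTPM"
cls, count, comment = EXTENSIONS["XTPM"].get(code, (RawBytes, 1, "Unknown tag"))
print(cls.__name__, count, comment)                  # -> UInt32 1 Global Volume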
VahidooX/DeepCCA
objectives.py
1
2281
import theano.tensor as T def cca_loss(outdim_size, use_all_singular_values): """ The main loss function (inner_cca_objective) is wrapped in this function due to the constraints imposed by Keras on objective functions """ def inner_cca_objective(y_true, y_pred): """ It is the loss function of CCA as introduced in the original paper. There can be other formulations. It is implemented by Theano tensor operations, and does not work on Tensorflow backend y_true is just ignored """ r1 = 1e-4 r2 = 1e-4 eps = 1e-12 o1 = o2 = y_pred.shape[1]//2 # unpack (separate) the output of networks for view 1 and view 2 H1 = y_pred[:, 0:o1].T H2 = y_pred[:, o1:o1+o2].T m = H1.shape[1] H1bar = H1 - (1.0 / m) * T.dot(H1, T.ones([m, m])) H2bar = H2 - (1.0 / m) * T.dot(H2, T.ones([m, m])) SigmaHat12 = (1.0 / (m - 1)) * T.dot(H1bar, H2bar.T) SigmaHat11 = (1.0 / (m - 1)) * T.dot(H1bar, H1bar.T) + r1 * T.eye(o1) SigmaHat22 = (1.0 / (m - 1)) * T.dot(H2bar, H2bar.T) + r2 * T.eye(o2) # Calculating the root inverse of covariance matrices by using eigen decomposition [D1, V1] = T.nlinalg.eigh(SigmaHat11) [D2, V2] = T.nlinalg.eigh(SigmaHat22) # Added to increase stability posInd1 = T.gt(D1, eps).nonzero()[0] D1 = D1[posInd1] V1 = V1[:, posInd1] posInd2 = T.gt(D2, eps).nonzero()[0] D2 = D2[posInd2] V2 = V2[:, posInd2] SigmaHat11RootInv = T.dot(T.dot(V1, T.nlinalg.diag(D1 ** -0.5)), V1.T) SigmaHat22RootInv = T.dot(T.dot(V2, T.nlinalg.diag(D2 ** -0.5)), V2.T) Tval = T.dot(T.dot(SigmaHat11RootInv, SigmaHat12), SigmaHat22RootInv) if use_all_singular_values: # all singular values are used to calculate the correlation corr = T.sqrt(T.nlinalg.trace(T.dot(Tval.T, Tval))) else: # just the top outdim_size singular values are used [U, V] = T.nlinalg.eigh(T.dot(Tval.T, Tval)) U = U[T.gt(U, eps).nonzero()[0]] U = U.sort() corr = T.sum(T.sqrt(U[0:outdim_size])) return -corr return inner_cca_objective
mit
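A hedged example of plugging cca_loss into a Keras model: the toy two-view network below is purely illustrative (Keras 2 functional API and a Theano backend are assumed) and is not the architecture used in the repository.

# The loss expects the model's single output to concatenate the two views
# side by side, so each view contributes outdim_size columns.
from keras.layers import Input, Dense, concatenate
from keras.models import Model
from objectives import cca_loss

outdim_size = 10
view1, view2 = Input(shape=(50,)), Input(shape=(50,))
merged = concatenate([Dense(outdim_size)(view1), Dense(outdim_size)(view2)])
model = Model(inputs=[view1, view2], outputs=merged)
model.compile(optimizer='rmsprop',
              loss=cca_loss(outdim_size, use_all_singular_values=False))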
nagyistoce/nips14-ssl
anglepy/paramgraphics.py
5
6074
import numpy as np import os import PIL.Image import pylab def save_images(images, directory, filename): if not os.path.exists(directory): os.makedirs(directory) w = sum(i.size[0] for i in images) mh = max(i.size[1] for i in images) result = PIL.Image.new("RGBA", (w, mh)) x = 0 for i in images: result.paste(i, (x, 0)) x += i.size[0] result.save(directory+'/'+filename) def scale_to_unit_interval(ndar, eps=1e-8): """ Scales all values in the ndarray ndar to be between 0 and 1 """ ndar = ndar.copy() ndar -= ndar.min() ndar *= 1.0 / (ndar.max() + eps) return ndar def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0), scale=True, output_pixel_vals=True, colorImg=False): """ Transform an array with one flattened image per row, into an array in which images are reshaped and layed out like tiles on a floor. This function is useful for visualizing datasets whose rows are images, and also columns of matrices for transforming those rows (such as the first layer of a neural net). :type X: a 2-D ndarray or a tuple of 4 channels, elements of which can be 2-D ndarrays or None; :param X: a 2-D array in which every row is a flattened image. :type img_shape: tuple; (height, width) :param img_shape: the original shape of each image :type tile_shape: tuple; (rows, cols) :param tile_shape: the number of images to tile (rows, cols) :param output_pixel_vals: if output should be pixel values (i.e. int8 values) or floats :param scale_rows_to_unit_interval: if the values need to be scaled before being plotted to [0,1] or not :returns: array suitable for viewing as an image. (See:`PIL.Image.fromarray`.) :rtype: a 2-d array with same dtype as X. """ X = X * 1.0 # converts ints to floats if colorImg: channelSize = X.shape[1]/3 X = (X[:,0:channelSize], X[:,channelSize:2*channelSize], X[:,2*channelSize:3*channelSize], None) assert len(img_shape) == 2 assert len(tile_shape) == 2 assert len(tile_spacing) == 2 # The expression below can be re-written in a more C style as # follows : # # out_shape = [0,0] # out_shape[0] = (img_shape[0] + tile_spacing[0]) * tile_shape[0] - # tile_spacing[0] # out_shape[1] = (img_shape[1] + tile_spacing[1]) * tile_shape[1] - # tile_spacing[1] out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)] if isinstance(X, tuple): assert len(X) == 4 # Create an output np ndarray to store the image if output_pixel_vals: out_array = np.zeros((out_shape[0], out_shape[1], 4), dtype='uint8') else: out_array = np.zeros((out_shape[0], out_shape[1], 4), dtype=X.dtype) #colors default to 0, alpha defaults to 1 (opaque) if output_pixel_vals: channel_defaults = [0, 0, 0, 255] else: channel_defaults = [0., 0., 0., 1.] 
for i in xrange(4): if X[i] is None: # if channel is None, fill it with zeros of the correct # dtype out_array[:, :, i] = np.zeros(out_shape, dtype='uint8' if output_pixel_vals else out_array.dtype ) + channel_defaults[i] else: # use a recurrent call to compute the channel and store it # in the output xi = X[i] if scale: xi = (X[i] - X[i].min()) / (X[i].max() - X[i].min()) out_array[:, :, i] = tile_raster_images(xi, img_shape, tile_shape, tile_spacing, False, output_pixel_vals) return out_array else: # if we are dealing with only one channel H, W = img_shape Hs, Ws = tile_spacing # generate a matrix to store the output out_array = np.zeros(out_shape, dtype='uint8' if output_pixel_vals else X.dtype) for tile_row in xrange(tile_shape[0]): for tile_col in xrange(tile_shape[1]): if tile_row * tile_shape[1] + tile_col < X.shape[0]: if scale: # if we should scale values to be between 0 and 1 # do this by calling the `scale_to_unit_interval` # function tmp = X[tile_row * tile_shape[1] + tile_col].reshape(img_shape) this_img = scale_to_unit_interval(tmp) else: this_img = X[tile_row * tile_shape[1] + tile_col].reshape(img_shape) # add the slice to the corresponding position in the # output array out_array[ tile_row * (H+Hs): tile_row * (H + Hs) + H, tile_col * (W+Ws): tile_col * (W + Ws) + W ] \ = this_img * (255 if output_pixel_vals else 1) return out_array # Matrix to image def mat_to_img(w, dim_input, scale=False, colorImg=False, tile_spacing=(1,1), tile_shape=0): if tile_shape == 0: rowscols = int(w.shape[1]**0.5) tile_shape = (rowscols,rowscols) imgs = tile_raster_images(X=w.T, img_shape=dim_input, tile_shape=tile_shape, tile_spacing=tile_spacing, scale=scale, colorImg=colorImg) return PIL.Image.fromarray(imgs) # Show filters def imgshow(plt, w, dim_input, scale=False, colorImg=False, convertImgs=False, tile_spacing=(1,1)): if convertImgs: channelSize = w.shape[0]/3 w = tuple([w[channelSize*i:channelSize*(i+1)] for i in range(3)]) plt.axis('Off') pil_image = mat_to_img(w, dim_input, scale, colorImg, tile_spacing) plt.imshow(pil_image, cmap=pylab.gray(), origin='upper') return pil_image
mit
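A small usage sketch with made-up data showing how mat_to_img tiles flattened images into one picture (Python 2, matching the module); the filename and array are placeholders.

# Tile 100 random 28x28 grayscale "filters" (one flattened image per column)
# into a single PIL image and save it.
import numpy as np
import anglepy.paramgraphics as paramgraphics

w = np.random.rand(28 * 28, 100)
image = paramgraphics.mat_to_img(w, dim_input=(28, 28), scale=True)
image.save('tiles.png')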
PLyczkowski/Sticky-Keymap
2.74/scripts/addons/EWOCprojects_tools/EWOCprojects_tools/__init__.py
2
6870
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # Contributed to by # meta-androcto # bl_info = { "name": "EWOCprojects tools", "author": "Gert De Roost - paleajed", "version": (0, 4, 1), "blender": (2, 65, 0), "location": "View3D > Toolbar and View3D > Specials (W-key)", "description": "Edit mode tools - contrib version", "warning": "", "wiki_url": "", "tracker_url": "", "category": "Mesh"} if "bpy" in locals(): import imp imp.reload(mesh_edgetune) imp.reload(mesh_quadder) imp.reload(mesh_paredge) imp.reload(mesh_edgegrow) imp.reload(mesh_fanconnect) imp.reload(object_fastorigin) imp.reload(mesh_laprelax) imp.reload(mesh_polyredux) imp.reload(mesh_filletplus) imp.reload(mesh_innerweld) imp.reload(mesh_straightenplus) imp.reload(mesh_floodsel) imp.reload(mesh_deathguppie) imp.reload(mesh_selproject) imp.reload(object_creaprim) imp.reload(object_decouple) imp.reload(object_keeptrans) else: from . import mesh_edgetune from . import mesh_quadder from . import mesh_paredge from . import mesh_edgegrow from . import mesh_fanconnect from . import object_fastorigin from . import mesh_laprelax from . import mesh_polyredux from . import mesh_filletplus from . import mesh_innerweld from . import mesh_straightenplus from . import mesh_floodsel from . import mesh_deathguppie from . import mesh_selproject from . import object_creaprim from . import object_decouple from . 
import object_keeptrans import bpy from bpy.app.handlers import persistent class VIEW3D_MT_edit_mesh_paleajed(bpy.types.Menu): # Define the "Extras" menu bl_idname = "VIEW3D_MT_edit_mesh_paleajed" bl_label = "EWOCprojects tools" def draw(self, context): layout = self.layout layout.operator_context = "INVOKE_REGION_WIN" layout.operator("mesh.edgetune", text="EdgeTune") layout.operator("mesh.quadder", text="Quadder") layout.operator("mesh.paredge", text="ParEdge") layout.operator("mesh.edgegrow", text="EdgeGrow") layout.operator("mesh.fanconnect", text="FanConnect") layout.operator("object.fastorigin", text="FastOrigin") layout.operator("mesh.laprelax", text="LapRelax") layout.operator("mesh.polyredux", text="PolyRedux") layout.operator("mesh.filletplus", text="FilletPlus") layout.operator("mesh.innerweld", text="InnerWeld") layout.operator("mesh.straightenplus", text="StraightenPlus") layout.operator("mesh.floodsel", text="FloodSel") layout.operator("mesh.deathguppie", text="DeathGuppie") layout.operator("mesh.selproject", text="SelProject") class PaleajedPanel(bpy.types.Panel): bl_label = "EWOCprojects tools" bl_space_type = 'VIEW_3D' bl_region_type = 'TOOLS' bl_category = 'Tools' def draw(self, context): scn = bpy.context.scene layout = self.layout layout.operator("mesh.edgetune") layout.operator("mesh.quadder") layout.operator("mesh.paredge") if mesh_paredge.started: layout.prop(mesh_paredge.mainop, "Distance") layout.prop(mesh_paredge.mainop, "Both") if mesh_paredge.mainop.Both: layout.prop(mesh_paredge.mainop, "Cap") layout.operator("mesh.edgegrow") layout.operator("mesh.fanconnect") layout.operator("object.fastorigin") layout.operator("mesh.laprelax") layout.operator("mesh.polyredux") layout.operator("mesh.filletplus") layout.operator("mesh.innerweld") if not(mesh_straightenplus.started): layout.operator("mesh.straightenplus") else: layout.operator("mesh.straightenplus") msop = mesh_straightenplus.mainop layout.prop(msop, "Percentage") if mesh_straightenplus.started and msop.Percentage != msop.oldperc: msop.do_straighten() msop.oldperc = msop.Percentage layout.prop(msop, "CancelAxis") layout.operator("mesh.floodsel", text="Flood Sel") if mesh_floodsel.started: layout.prop(mesh_floodsel.mainop, "SelectMode") layout.prop(mesh_floodsel.mainop, "Multiple") layout.prop(mesh_floodsel.mainop, "Preselection") layout.prop(mesh_floodsel.mainop, "Diagonal") layout.operator("mesh.deathguppie") layout.prop(scn, "Smooth") layout.prop(scn, "Inner") if not(mesh_selproject.started): self.layout.operator("mesh.selproject", text="SelProject") if context.mode == 'EDIT_MESH': self.layout.prop(scn, "UseSel") if not(scn.UseSel): self.layout.prop(scn, "FromObject") else: scn.FromObject = bpy.context.active_object.name context.region.tag_redraw() else: self.layout.prop(scn, "FromObject") self.layout.prop(scn, "ToObject") else: self.layout.label(text="ENTER to confirm") self.layout.operator("object.creaprim") self.layout.prop(scn, "Name") self.layout.prop(scn, "Apply") if not(object_decouple.unparented): layout.operator("object.decouple", text="DeCouple") else: layout.operator("object.recouple", text="ReCouple") layout.operator("object.keeptrans") # Register all operators and panels # Define "Extras" menu def menu_func(self, context): self.layout.menu("VIEW3D_MT_edit_mesh_paleajed", icon='PLUGIN') def register(): bpy.app.handlers.scene_update_post.append(sceneupdate_handler) bpy.utils.register_module(__name__) # Add "Extras" menu to the "Add Mesh" menu bpy.types.VIEW3D_MT_edit_mesh_specials.prepend(menu_func) 
def unregister(): bpy.app.handlers.scene_update_post.remove(sceneupdate_handler) bpy.utils.unregister_module(__name__) # Remove "Extras" menu from the "Add Mesh" menu. bpy.types.VIEW3D_MT_edit_mesh_specials.remove(menu_func) if __name__ == "__main__": register() @persistent def sceneupdate_handler(dummy): scn = bpy.context.scene if not(list(scn.objects) == mesh_selproject.oldobjs): itemlist = [] objs = list(scn.objects) for ob in objs: if ob.type == 'MESH': itemlist.append((ob.name, ob.name, "Set From:")) bpy.types.Scene.FromObject = bpy.props.EnumProperty( items = itemlist, name = "From", description = "Object to project") bpy.types.Scene.ToObject = bpy.props.EnumProperty( items = itemlist, name = "To", description = "Object to project onto") mesh_selproject.oldobjs = list(scn.objects)
gpl-2.0
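A hypothetical snippet from Blender's Python console showing how the operators registered above are invoked once the add-on is enabled; it assumes an active mesh object in the scene.

# The mesh tools operate in Edit Mode; polyredux is one of the operators
# this add-on registers (see the panel layout above).
import bpy

bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.polyredux()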
stshine/servo
tests/wpt/web-platform-tests/old-tests/webdriver/modal/alerts_test.py
141
6347
import os import sys import unittest sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../.."))) import base_test from selenium.common import exceptions from selenium.webdriver.support import wait class AlertsTest(base_test.WebDriverBaseTest): def setUp(self): self.wait = wait.WebDriverWait(self.driver, 5, ignored_exceptions = [exceptions.NoAlertPresentException]) self.driver.get(self.webserver.where_is('modal/res/alerts.html')) def tearDown(self): try: self.driver.switch_to_alert().dismiss() except exceptions.NoAlertPresentException: pass # Alerts def test_should_allow_user_to_accept_an_alert(self): self.driver.find_element_by_css_selector('#alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() self.driver.current_url def test_should_allow_user_to_accept_an_alert_with_no_text(self): self.driver.find_element_by_css_selector('#empty-alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() self.driver.current_url def test_should_allow_user_to_dismiss_an_alert(self): self.driver.find_element_by_css_selector('#alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.dismiss() self.driver.current_url def test_should_allow_user_to_get_text_of_an_alert(self): self.driver.find_element_by_css_selector('#alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) value = alert.text alert.accept() self.assertEquals('cheese', value) def test_setting_the_value_of_an_alert_throws(self): self.driver.find_element_by_css_selector('#alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) with self.assertRaises(exceptions.ElementNotVisibleException): alert.send_keys('cheese') alert.accept() def test_alert_should_not_allow_additional_commands_if_dismissed(self): self.driver.find_element_by_css_selector('#alert').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() with self.assertRaises(exceptions.NoAlertPresentException): alert.text # Prompts def test_should_allow_user_to_accept_a_prompt(self): self.driver.find_element_by_css_selector('#prompt').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == '') def test_should_allow_user_to_dismiss_a_prompt(self): self.driver.find_element_by_css_selector('#prompt').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.dismiss() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'null') def test_should_allow_user_to_set_the_value_of_a_prompt(self): self.driver.find_element_by_css_selector('#prompt').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.send_keys('cheese') alert.accept() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'cheese') def test_should_allow_user_to_get_text_of_a_prompt(self): self.driver.find_element_by_css_selector('#prompt').click() alert = self.wait.until(lambda x: x.switch_to_alert()) value = alert.text alert.accept() self.assertEquals('Enter something', value) def test_prompt_should_not_allow_additional_commands_if_dismissed(self): self.driver.find_element_by_css_selector('#prompt').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() with self.assertRaises(exceptions.NoAlertPresentException): alert.text def test_prompt_should_use_default_value_if_no_keys_sent(self): self.driver.find_element_by_css_selector('#prompt-with-default').click() alert = self.wait.until(lambda x: x.switch_to_alert()) 
alert.accept() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'This is a default value') def test_prompt_should_have_null_value_if_dismissed(self): self.driver.find_element_by_css_selector('#prompt-with-default').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.dismiss() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'null') # Confirmations def test_should_allow_user_to_accept_a_confirm(self): self.driver.find_element_by_css_selector('#confirm').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'true') def test_should_allow_user_to_dismiss_a_confirm(self): self.driver.find_element_by_css_selector('#confirm').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.dismiss() self.wait.until(lambda x: x.find_element_by_css_selector('#text').text == 'false') def test_setting_the_value_of_a_confirm_throws(self): self.driver.find_element_by_css_selector('#confirm').click() alert = self.wait.until(lambda x: x.switch_to_alert()) with self.assertRaises(exceptions.ElementNotVisibleException): alert.send_keys('cheese') alert.accept() def test_should_allow_user_to_get_text_of_a_confirm(self): self.driver.find_element_by_css_selector('#confirm').click() alert = self.wait.until(lambda x: x.switch_to_alert()) value = alert.text alert.accept() self.assertEquals('cheese', value) def test_confirm_should_not_allow_additional_commands_if_dismissed(self): self.driver.find_element_by_css_selector('#confirm').click() alert = self.wait.until(lambda x: x.switch_to_alert()) alert.accept() with self.assertRaises(exceptions.NoAlertPresentException): alert.text """ def test_switch_to_missing_alert_fails(self): with self.assertRaises(exceptions.NoAlertPresentException): self.driver.switch_to_alert() """ if __name__ == '__main__': unittest.main()
mpl-2.0
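A stand-alone sketch of the wait-for-alert pattern these tests rely on; the driver choice and URL are placeholders, and the calls use the same (older) selenium bindings as the test file.

# Click the trigger element, wait for the alert to appear, read it, accept it.
from selenium import webdriver
from selenium.common import exceptions
from selenium.webdriver.support import wait

driver = webdriver.Firefox()
driver.get("http://localhost:8000/modal/res/alerts.html")
waiter = wait.WebDriverWait(driver, 5,
                            ignored_exceptions=[exceptions.NoAlertPresentException])

driver.find_element_by_css_selector('#alert').click()
alert = waiter.until(lambda d: d.switch_to_alert())
print(alert.text)
alert.accept()
driver.quit()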
bdh1011/cupeye
venv/lib/python2.7/site-packages/werkzeug/utils.py
148
23063
# -*- coding: utf-8 -*- """ werkzeug.utils ~~~~~~~~~~~~~~ This module implements various utilities for WSGI applications. Most of them are used by the request and response wrappers but especially for middleware development it makes sense to use them without the wrappers. :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import re import os import sys import pkgutil try: from html.entities import name2codepoint except ImportError: from htmlentitydefs import name2codepoint from werkzeug._compat import unichr, text_type, string_types, iteritems, \ reraise, PY2 from werkzeug._internal import _DictAccessorProperty, \ _parse_signature, _missing _format_re = re.compile(r'\$(?:(%s)|\{(%s)\})' % (('[a-zA-Z_][a-zA-Z0-9_]*',) * 2)) _entity_re = re.compile(r'&([^;]+);') _filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]') _windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4', 'LPT1', 'LPT2', 'LPT3', 'PRN', 'NUL') class cached_property(object): """A decorator that converts a function into a lazy property. The function wrapped is called the first time to retrieve the result and then that calculated result is used the next time you access the value:: class Foo(object): @cached_property def foo(self): # calculate something important here return 42 The class has to have a `__dict__` in order for this property to work. """ # implementation detail: this property is implemented as non-data # descriptor. non-data descriptors are only invoked if there is # no entry with the same name in the instance's __dict__. # this allows us to completely get rid of the access function call # overhead. If one choses to invoke __get__ by hand the property # will still work as expected because the lookup logic is replicated # in __get__ for manual invocation. def __init__(self, func, name=None, doc=None): self.__name__ = name or func.__name__ self.__module__ = func.__module__ self.__doc__ = doc or func.__doc__ self.func = func def __get__(self, obj, type=None): if obj is None: return self value = obj.__dict__.get(self.__name__, _missing) if value is _missing: value = self.func(obj) obj.__dict__[self.__name__] = value return value class environ_property(_DictAccessorProperty): """Maps request attributes to environment variables. This works not only for the Werzeug request object, but also any other class with an environ attribute: >>> class Test(object): ... environ = {'key': 'value'} ... test = environ_property('key') >>> var = Test() >>> var.test 'value' If you pass it a second value it's used as default if the key does not exist, the third one can be a converter that takes a value and converts it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value is used. If no default value is provided `None` is used. Per default the property is read only. You have to explicitly enable it by passing ``read_only=False`` to the constructor. """ read_only = True def lookup(self, obj): return obj.environ class header_property(_DictAccessorProperty): """Like `environ_property` but for headers.""" def lookup(self, obj): return obj.headers class HTMLBuilder(object): """Helper object for HTML generation. Per default there are two instances of that class. The `html` one, and the `xhtml` one for those two dialects. The class uses keyword parameters and positional parameters to generate small snippets of HTML. Keyword parameters are converted to XML/SGML attributes, positional arguments are used as children. 
Because Python accepts positional arguments before keyword arguments it's a good idea to use a list with the star-syntax for some children: >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ', ... html.a('bar', href='bar.html')]) u'<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>' This class works around some browser limitations and can not be used for arbitrary SGML/XML generation. For that purpose lxml and similar libraries exist. Calling the builder escapes the string passed: >>> html.p(html("<foo>")) u'<p>&lt;foo&gt;</p>' """ _entity_re = re.compile(r'&([^;]+);') _entities = name2codepoint.copy() _entities['apos'] = 39 _empty_elements = set([ 'area', 'base', 'basefont', 'br', 'col', 'command', 'embed', 'frame', 'hr', 'img', 'input', 'keygen', 'isindex', 'link', 'meta', 'param', 'source', 'wbr' ]) _boolean_attributes = set([ 'selected', 'checked', 'compact', 'declare', 'defer', 'disabled', 'ismap', 'multiple', 'nohref', 'noresize', 'noshade', 'nowrap' ]) _plaintext_elements = set(['textarea']) _c_like_cdata = set(['script', 'style']) def __init__(self, dialect): self._dialect = dialect def __call__(self, s): return escape(s) def __getattr__(self, tag): if tag[:2] == '__': raise AttributeError(tag) def proxy(*children, **arguments): buffer = '<' + tag for key, value in iteritems(arguments): if value is None: continue if key[-1] == '_': key = key[:-1] if key in self._boolean_attributes: if not value: continue if self._dialect == 'xhtml': value = '="' + key + '"' else: value = '' else: value = '="' + escape(value) + '"' buffer += ' ' + key + value if not children and tag in self._empty_elements: if self._dialect == 'xhtml': buffer += ' />' else: buffer += '>' return buffer buffer += '>' children_as_string = ''.join([text_type(x) for x in children if x is not None]) if children_as_string: if tag in self._plaintext_elements: children_as_string = escape(children_as_string) elif tag in self._c_like_cdata and self._dialect == 'xhtml': children_as_string = '/*<![CDATA[*/' + \ children_as_string + '/*]]>*/' buffer += children_as_string + '</' + tag + '>' return buffer return proxy def __repr__(self): return '<%s for %r>' % ( self.__class__.__name__, self._dialect ) html = HTMLBuilder('html') xhtml = HTMLBuilder('xhtml') def get_content_type(mimetype, charset): """Returns the full content type string with charset for a mimetype. If the mimetype represents text the charset will be appended as charset parameter, otherwise the mimetype is returned unchanged. :param mimetype: the mimetype to be used as content type. :param charset: the charset to be appended in case it was a text mimetype. :return: the content type. """ if mimetype.startswith('text/') or \ mimetype == 'application/xml' or \ (mimetype.startswith('application/') and mimetype.endswith('+xml')): mimetype += '; charset=' + charset return mimetype def format_string(string, context): """String-template format a string: >>> format_string('$foo and ${foo}s', dict(foo=42)) '42 and 42s' This does not do any attribute lookup etc. For more advanced string formattings have a look at the `werkzeug.template` module. :param string: the format string. :param context: a dict with the variables to insert. """ def lookup_arg(match): x = context[match.group(1) or match.group(2)] if not isinstance(x, string_types): x = type(string)(x) return x return _format_re.sub(lookup_arg, string) def secure_filename(filename): r"""Pass it a filename and it will return a secure version of it. 
This filename can then safely be stored on a regular file system and passed to :func:`os.path.join`. The filename returned is an ASCII only string for maximum portability. On windows systems the function also makes sure that the file is not named after one of the special device files. >>> secure_filename("My cool movie.mov") 'My_cool_movie.mov' >>> secure_filename("../../../etc/passwd") 'etc_passwd' >>> secure_filename(u'i contain cool \xfcml\xe4uts.txt') 'i_contain_cool_umlauts.txt' The function might return an empty filename. It's your responsibility to ensure that the filename is unique and that you generate random filename if the function returned an empty one. .. versionadded:: 0.5 :param filename: the filename to secure """ if isinstance(filename, text_type): from unicodedata import normalize filename = normalize('NFKD', filename).encode('ascii', 'ignore') if not PY2: filename = filename.decode('ascii') for sep in os.path.sep, os.path.altsep: if sep: filename = filename.replace(sep, ' ') filename = str(_filename_ascii_strip_re.sub('', '_'.join( filename.split()))).strip('._') # on nt a couple of special files are present in each folder. We # have to ensure that the target file is not such a filename. In # this case we prepend an underline if os.name == 'nt' and filename and \ filename.split('.')[0].upper() in _windows_device_files: filename = '_' + filename return filename def escape(s, quote=None): """Replace special characters "&", "<", ">" and (") to HTML-safe sequences. There is a special handling for `None` which escapes to an empty string. .. versionchanged:: 0.9 `quote` is now implicitly on. :param s: the string to escape. :param quote: ignored. """ if s is None: return '' elif hasattr(s, '__html__'): return text_type(s.__html__()) elif not isinstance(s, string_types): s = text_type(s) if quote is not None: from warnings import warn warn(DeprecationWarning('quote parameter is implicit now'), stacklevel=2) s = s.replace('&', '&amp;').replace('<', '&lt;') \ .replace('>', '&gt;').replace('"', "&quot;") return s def unescape(s): """The reverse function of `escape`. This unescapes all the HTML entities, not only the XML entities inserted by `escape`. :param s: the string to unescape. """ def handle_match(m): name = m.group(1) if name in HTMLBuilder._entities: return unichr(HTMLBuilder._entities[name]) try: if name[:2] in ('#x', '#X'): return unichr(int(name[2:], 16)) elif name.startswith('#'): return unichr(int(name[1:])) except ValueError: pass return u'' return _entity_re.sub(handle_match, s) def redirect(location, code=302, Response=None): """Returns a response object (a WSGI application) that, if called, redirects the client to the target location. Supported codes are 301, 302, 303, 305, and 307. 300 is not supported because it's not a real redirect and 304 because it's the answer for a request with a request with defined If-Modified-Since headers. .. versionadded:: 0.6 The location can now be a unicode string that is encoded using the :func:`iri_to_uri` function. .. versionadded:: 0.10 The class used for the Response object can now be passed in. :param location: the location the response should redirect to. :param code: the redirect status code. defaults to 302. :param class Response: a Response class to use when instantiating a response. The default is :class:`werkzeug.wrappers.Response` if unspecified. 
""" if Response is None: from werkzeug.wrappers import Response display_location = escape(location) if isinstance(location, text_type): # Safe conversion is necessary here as we might redirect # to a broken URI scheme (for instance itms-services). from werkzeug.urls import iri_to_uri location = iri_to_uri(location, safe_conversion=True) response = Response( '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n' '<title>Redirecting...</title>\n' '<h1>Redirecting...</h1>\n' '<p>You should be redirected automatically to target URL: ' '<a href="%s">%s</a>. If not click the link.' % (escape(location), display_location), code, mimetype='text/html') response.headers['Location'] = location return response def append_slash_redirect(environ, code=301): """Redirects to the same URL but with a slash appended. The behavior of this function is undefined if the path ends with a slash already. :param environ: the WSGI environment for the request that triggers the redirect. :param code: the status code for the redirect. """ new_path = environ['PATH_INFO'].strip('/') + '/' query_string = environ.get('QUERY_STRING') if query_string: new_path += '?' + query_string return redirect(new_path, code) def import_string(import_name, silent=False): """Imports an object based on a string. This is useful if you want to use import paths as endpoints or something similar. An import path can be specified either in dotted notation (``xml.sax.saxutils.escape``) or with a colon as object delimiter (``xml.sax.saxutils:escape``). If `silent` is True the return value will be `None` if the import fails. :param import_name: the dotted name for the object to import. :param silent: if set to `True` import errors are ignored and `None` is returned instead. :return: imported object """ # force the import name to automatically convert to strings # __import__ is not able to handle unicode strings in the fromlist # if the module is a package import_name = str(import_name).replace(':', '.') try: try: __import__(import_name) except ImportError: if '.' not in import_name: raise else: return sys.modules[import_name] module_name, obj_name = import_name.rsplit('.', 1) try: module = __import__(module_name, None, None, [obj_name]) except ImportError: # support importing modules not yet set up by the parent module # (or package for that matter) module = import_string(module_name) try: return getattr(module, obj_name) except AttributeError as e: raise ImportError(e) except ImportError as e: if not silent: reraise( ImportStringError, ImportStringError(import_name, e), sys.exc_info()[2]) def find_modules(import_path, include_packages=False, recursive=False): """Finds all the modules below a package. This can be useful to automatically import all views / controllers so that their metaclasses / function decorators have a chance to register themselves on the application. Packages are not returned unless `include_packages` is `True`. This can also recursively list modules but in that case it will import all the packages to get the correct load path of that module. :param import_name: the dotted name for the package to find child modules. :param include_packages: set to `True` if packages should be returned, too. :param recursive: set to `True` if recursion should happen. :return: generator """ module = import_string(import_path) path = getattr(module, '__path__', None) if path is None: raise ValueError('%r is not a package' % import_path) basename = module.__name__ + '.' 
for importer, modname, ispkg in pkgutil.iter_modules(path): modname = basename + modname if ispkg: if include_packages: yield modname if recursive: for item in find_modules(modname, include_packages, True): yield item else: yield modname def validate_arguments(func, args, kwargs, drop_extra=True): """Checks if the function accepts the arguments and keyword arguments. Returns a new ``(args, kwargs)`` tuple that can safely be passed to the function without causing a `TypeError` because the function signature is incompatible. If `drop_extra` is set to `True` (which is the default) any extra positional or keyword arguments are dropped automatically. The exception raised provides three attributes: `missing` A set of argument names that the function expected but where missing. `extra` A dict of keyword arguments that the function can not handle but where provided. `extra_positional` A list of values that where given by positional argument but the function cannot accept. This can be useful for decorators that forward user submitted data to a view function:: from werkzeug.utils import ArgumentValidationError, validate_arguments def sanitize(f): def proxy(request): data = request.values.to_dict() try: args, kwargs = validate_arguments(f, (request,), data) except ArgumentValidationError: raise BadRequest('The browser failed to transmit all ' 'the data expected.') return f(*args, **kwargs) return proxy :param func: the function the validation is performed against. :param args: a tuple of positional arguments. :param kwargs: a dict of keyword arguments. :param drop_extra: set to `False` if you don't want extra arguments to be silently dropped. :return: tuple in the form ``(args, kwargs)``. """ parser = _parse_signature(func) args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5] if missing: raise ArgumentValidationError(tuple(missing)) elif (extra or extra_positional) and not drop_extra: raise ArgumentValidationError(None, extra, extra_positional) return tuple(args), kwargs def bind_arguments(func, args, kwargs): """Bind the arguments provided into a dict. When passed a function, a tuple of arguments and a dict of keyword arguments `bind_arguments` returns a dict of names as the function would see it. This can be useful to implement a cache decorator that uses the function arguments to build the cache key based on the values of the arguments. :param func: the function the arguments should be bound for. :param args: tuple of positional arguments. :param kwargs: a dict of keyword arguments. :return: a :class:`dict` of bound keyword arguments. 
""" args, kwargs, missing, extra, extra_positional, \ arg_spec, vararg_var, kwarg_var = _parse_signature(func)(args, kwargs) values = {} for (name, has_default, default), value in zip(arg_spec, args): values[name] = value if vararg_var is not None: values[vararg_var] = tuple(extra_positional) elif extra_positional: raise TypeError('too many positional arguments') if kwarg_var is not None: multikw = set(extra) & set([x[0] for x in arg_spec]) if multikw: raise TypeError('got multiple values for keyword argument ' + repr(next(iter(multikw)))) values[kwarg_var] = extra elif extra: raise TypeError('got unexpected keyword argument ' + repr(next(iter(extra)))) return values class ArgumentValidationError(ValueError): """Raised if :func:`validate_arguments` fails to validate""" def __init__(self, missing=None, extra=None, extra_positional=None): self.missing = set(missing or ()) self.extra = extra or {} self.extra_positional = extra_positional or [] ValueError.__init__(self, 'function arguments invalid. (' '%d missing, %d additional)' % ( len(self.missing), len(self.extra) + len(self.extra_positional) )) class ImportStringError(ImportError): """Provides information about a failed :func:`import_string` attempt.""" #: String in dotted notation that failed to be imported. import_name = None #: Wrapped exception. exception = None def __init__(self, import_name, exception): self.import_name = import_name self.exception = exception msg = ( 'import_string() failed for %r. Possible reasons are:\n\n' '- missing __init__.py in a package;\n' '- package or module path not included in sys.path;\n' '- duplicated package or module name taking precedence in ' 'sys.path;\n' '- missing module, class, function or variable;\n\n' 'Debugged import:\n\n%s\n\n' 'Original exception:\n\n%s: %s') name = '' tracked = [] for part in import_name.replace(':', '.').split('.'): name += (name and '.') + part imported = import_string(name, silent=True) if imported: tracked.append((name, getattr(imported, '__file__', None))) else: track = ['- %r found in %r.' % (n, i) for n, i in tracked] track.append('- %r not found.' % name) msg = msg % (import_name, '\n'.join(track), exception.__class__.__name__, str(exception)) break ImportError.__init__(self, msg) def __repr__(self): return '<%s(%r, %r)>' % (self.__class__.__name__, self.import_name, self.exception) # circular dependencies from werkzeug.http import quote_header_value, unquote_header_value, \ cookie_date # DEPRECATED # these objects were previously in this module as well. we import # them here for backwards compatibility with old pickles. from werkzeug.datastructures import MultiDict, CombinedMultiDict, \ Headers, EnvironHeaders from werkzeug.http import parse_cookie, dump_cookie
bsd-3-clause
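A quick illustration of three of the helpers above (secure_filename, redirect, cached_property); the printed values echo the module's own docstring examples, and the redirect simply builds a WSGI Response object.

# secure_filename sanitizes untrusted names, redirect builds a 302 response,
# and cached_property computes its value once and stores it on the instance.
from werkzeug.utils import cached_property, redirect, secure_filename

print(secure_filename(u"../../../etc/passwd"))       # 'etc_passwd'

response = redirect("https://example.com/", code=302)
print(response.status, response.headers['Location'])

class Config(object):
    @cached_property
    def answer(self):
        print("computed once")
        return 42

cfg = Config()
print(cfg.answer, cfg.answer)    # second access comes straight from __dict__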
tersmitten/ansible
lib/ansible/modules/cloud/google/gcp_compute_interconnect_attachment.py
4
18037
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_compute_interconnect_attachment description: - Represents an InterconnectAttachment (VLAN attachment) resource. For more information, see Creating VLAN Attachments. short_description: Creates a GCP InterconnectAttachment version_added: 2.8 author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: state: description: - Whether the given object should exist in GCP choices: - present - absent default: present interconnect: description: - URL of the underlying Interconnect object that this attachment's traffic will traverse through. Required if type is DEDICATED, must not be set if type is PARTNER. required: false description: description: - An optional description of this resource. required: false edge_availability_domain: description: - Desired availability domain for the attachment. Only available for type PARTNER, at creation time. For improved reliability, customers should configure a pair of attachments with one per availability domain. The selected availability domain will be provided to the Partner via the pairing key so that the provisioned circuit will lie in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY. required: false type: description: - The type of InterconnectAttachment you wish to create. Defaults to DEDICATED. required: false choices: - DEDICATED - PARTNER - PARTNER_PROVIDER router: description: - URL of the cloud router to be used for dynamic routing. This router must be in the same region as this InterconnectAttachment. The InterconnectAttachment will automatically connect the Interconnect to the network & region within which the Cloud Router is configured. - 'This field represents a link to a Router resource in GCP. It can be specified in two ways. First, you can place a dictionary with key ''selfLink'' and value of your resource''s selfLink Alternatively, you can add `register: name-of-resource` to a gcp_compute_router task and then set this router field to "{{ name-of-resource }}"' required: true name: description: - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. 
Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. required: true candidate_subnets: description: - Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress and customerRouterIpAddress for this attachment. - All prefixes must be within link-local address space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to select an unused /29 from the supplied candidate prefix(es). The request will fail if all possible /29s are in use on Google's edge. If not supplied, Google will randomly select an unused /29 from all of link-local space. required: false vlan_tag8021q: description: - The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. required: false region: description: - Region where the regional interconnect attachment resides. required: true extends_documentation_fragment: gcp ''' EXAMPLES = ''' - name: create a interconnect attachment gcp_compute_interconnect_attachment: name: test_object region: us-central1 project: test_project auth_kind: serviceaccount interconnect: https://googleapis.com/compute/v1/projects/test_project/global/interconnects/... router: https://googleapis.com/compute/v1/projects/test_project/regions/us-central1/routers/... service_account_file: "/tmp/auth.pem" state: present register: disk ''' RETURN = ''' cloudRouterIpAddress: description: - IPv4 address + prefix length to be configured on Cloud Router Interface for this interconnect attachment. returned: success type: str customerRouterIpAddress: description: - IPv4 address + prefix length to be configured on the customer router subinterface for this interconnect attachment. returned: success type: str interconnect: description: - URL of the underlying Interconnect object that this attachment's traffic will traverse through. Required if type is DEDICATED, must not be set if type is PARTNER. returned: success type: str description: description: - An optional description of this resource. returned: success type: str edgeAvailabilityDomain: description: - Desired availability domain for the attachment. Only available for type PARTNER, at creation time. For improved reliability, customers should configure a pair of attachments with one per availability domain. The selected availability domain will be provided to the Partner via the pairing key so that the provisioned circuit will lie in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY. returned: success type: str pairingKey: description: - '[Output only for type PARTNER. Not present for DEDICATED]. The opaque identifier of an PARTNER attachment used to initiate provisioning with a selected partner. Of the form "XXXXX/region/domain" .' returned: success type: str partnerAsn: description: - "[Output only for type PARTNER. Not present for DEDICATED]. Optional BGP ASN for the router that should be supplied by a layer 3 Partner if they configured BGP on behalf of the customer." returned: success type: str privateInterconnectInfo: description: - Information specific to an InterconnectAttachment. This property is populated if the interconnect that this is attached to is of type DEDICATED. 
returned: success type: complex contains: tag8021q: description: - 802.1q encapsulation tag to be used for traffic between Google and the customer, going to and from this network and region. returned: success type: int type: description: - The type of InterconnectAttachment you wish to create. Defaults to DEDICATED. returned: success type: str state: description: - "[Output Only] The current state of this attachment's functionality." returned: success type: str googleReferenceId: description: - Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. returned: success type: str router: description: - URL of the cloud router to be used for dynamic routing. This router must be in the same region as this InterconnectAttachment. The InterconnectAttachment will automatically connect the Interconnect to the network & region within which the Cloud Router is configured. returned: success type: dict creationTimestamp: description: - Creation timestamp in RFC3339 text format. returned: success type: str id: description: - The unique identifier for the resource. This identifier is defined by the server. returned: success type: str name: description: - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. returned: success type: str candidateSubnets: description: - Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress and customerRouterIpAddress for this attachment. - All prefixes must be within link-local address space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to select an unused /29 from the supplied candidate prefix(es). The request will fail if all possible /29s are in use on Google's edge. If not supplied, Google will randomly select an unused /29 from all of link-local space. returned: success type: list vlanTag8021q: description: - The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. returned: success type: int region: description: - Region where the regional interconnect attachment resides. 
returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict import json import re import time ################################################################################ # Main ################################################################################ def main(): """Main function""" module = GcpModule( argument_spec=dict( state=dict(default='present', choices=['present', 'absent'], type='str'), interconnect=dict(type='str'), description=dict(type='str'), edge_availability_domain=dict(type='str'), type=dict(type='str', choices=['DEDICATED', 'PARTNER', 'PARTNER_PROVIDER']), router=dict(required=True, type='dict'), name=dict(required=True, type='str'), candidate_subnets=dict(type='list', elements='str'), vlan_tag8021q=dict(type='int'), region=dict(required=True, type='str'), ) ) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/compute'] state = module.params['state'] kind = 'compute#interconnectAttachment' fetch = fetch_resource(module, self_link(module), kind) changed = False if fetch: if state == 'present': if is_different(module, fetch): update(module, self_link(module), kind) fetch = fetch_resource(module, self_link(module), kind) changed = True else: delete(module, self_link(module), kind) fetch = {} changed = True else: if state == 'present': fetch = create(module, collection(module), kind) changed = True else: fetch = {} fetch.update({'changed': changed}) module.exit_json(**fetch) def create(module, link, kind): auth = GcpSession(module, 'compute') return wait_for_operation(module, auth.post(link, resource_to_request(module))) def update(module, link, kind): delete(module, self_link(module), kind) create(module, collection(module), kind) def delete(module, link, kind): auth = GcpSession(module, 'compute') return wait_for_operation(module, auth.delete(link)) def resource_to_request(module): request = { u'kind': 'compute#interconnectAttachment', u'interconnect': module.params.get('interconnect'), u'description': module.params.get('description'), u'edgeAvailabilityDomain': module.params.get('edge_availability_domain'), u'type': module.params.get('type'), u'router': replace_resource_dict(module.params.get(u'router', {}), 'selfLink'), u'name': module.params.get('name'), u'candidateSubnets': module.params.get('candidate_subnets'), u'vlanTag8021q': module.params.get('vlan_tag8021q'), } return_vals = {} for k, v in request.items(): if v or v is False: return_vals[k] = v return return_vals def fetch_resource(module, link, kind, allow_not_found=True): auth = GcpSession(module, 'compute') return return_if_object(module, auth.get(link), kind, allow_not_found) def self_link(module): return "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{name}".format(**module.params) def collection(module): return "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/interconnectAttachments".format(**module.params) def return_if_object(module, response, kind, allow_not_found=False): # If not found, return nothing. if allow_not_found and response.status_code == 404: return None # If no content, return nothing. 
if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError): module.fail_json(msg="Invalid JSON response with error: %s" % response.text) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result def is_different(module, response): request = resource_to_request(module) response = response_to_hash(module, response) # Remove all output-only from response. response_vals = {} for k, v in response.items(): if k in request: response_vals[k] = v request_vals = {} for k, v in request.items(): if k in response: request_vals[k] = v return GcpRequest(request_vals) != GcpRequest(response_vals) # Remove unnecessary properties from the response. # This is for doing comparisons with Ansible's current parameters. def response_to_hash(module, response): return { u'cloudRouterIpAddress': response.get(u'cloudRouterIpAddress'), u'customerRouterIpAddress': response.get(u'customerRouterIpAddress'), u'interconnect': response.get(u'interconnect'), u'description': response.get(u'description'), u'edgeAvailabilityDomain': response.get(u'edgeAvailabilityDomain'), u'pairingKey': response.get(u'pairingKey'), u'partnerAsn': response.get(u'partnerAsn'), u'privateInterconnectInfo': InterconnectAttachmentPrivateinterconnectinfo(response.get(u'privateInterconnectInfo', {}), module).from_response(), u'type': response.get(u'type'), u'state': response.get(u'state'), u'googleReferenceId': response.get(u'googleReferenceId'), u'router': response.get(u'router'), u'creationTimestamp': response.get(u'creationTimestamp'), u'id': response.get(u'id'), u'name': response.get(u'name'), u'candidateSubnets': response.get(u'candidateSubnets'), u'vlanTag8021q': response.get(u'vlanTag8021q'), } def region_selflink(name, params): if name is None: return url = r"https://www.googleapis.com/compute/v1/projects/.*/regions/[a-z1-9\-]*" if not re.match(url, name): name = "https://www.googleapis.com/compute/v1/projects/{project}/regions/%s".format(**params) % name return name def async_op_url(module, extra_data=None): if extra_data is None: extra_data = {} url = "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/operations/{op_id}" combined = extra_data.copy() combined.update(module.params) return url.format(**combined) def wait_for_operation(module, response): op_result = return_if_object(module, response, 'compute#operation') if op_result is None: return {} status = navigate_hash(op_result, ['status']) wait_done = wait_for_completion(status, op_result, module) return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#interconnectAttachment') def wait_for_completion(status, op_result, module): op_id = navigate_hash(op_result, ['name']) op_uri = async_op_url(module, {'op_id': op_id}) while status != 'DONE': raise_if_errors(op_result, ['error', 'errors'], module) time.sleep(1.0) op_result = fetch_resource(module, op_uri, 'compute#operation', False) status = navigate_hash(op_result, ['status']) return op_result def raise_if_errors(response, err_path, module): errors = navigate_hash(response, err_path) if errors is not None: module.fail_json(msg=errors) class InterconnectAttachmentPrivateinterconnectinfo(object): def __init__(self, request, module): self.module = module if request: self.request = request else: self.request = {} def to_request(self): return remove_nones_from_dict({}) def from_response(self): return 
remove_nones_from_dict({}) if __name__ == '__main__': main()
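A note on the module above: its idempotence check (is_different) compares only the keys shared between the desired request and the live response, and its update() path deletes and re-creates the attachment rather than patching it. A minimal, self-contained sketch of that comparison pattern, using plain dictionaries with invented values (not taken from the module):

def keep_shared_keys(request, response):
    """Mirror of is_different(): compare only the keys the two dicts share."""
    request_vals = {k: v for k, v in request.items() if k in response}
    response_vals = {k: v for k, v in response.items() if k in request}
    return request_vals, response_vals


# Invented values, shaped like resource_to_request() output and a GET response.
desired = {'name': 'attach-1', 'vlanTag8021q': 100}
live = {'name': 'attach-1', 'vlanTag8021q': 200, 'state': 'ACTIVE'}

req, resp = keep_shared_keys(desired, live)
print(req != resp)  # True -> the module's update() would delete and re-create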
gpl-3.0
openstack/ironic
ironic/common/release_mappings.py
1
12857
# Copyright 2016 Intel Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ironic.common.i18n import _ # NOTE(xek): This decides the version cap of RPC messages sent to conductor # and objects during rolling upgrades, when [DEFAULT]/pin_release_version # configuration is set. # # Remember to add a new entry for the new version that is shipping in a new # release. # # We support a rolling upgrade between adjacent named releases, as well as # between a release and master, so old, unsupported releases can be removed, # together with the supporting code, which is typically found in an object's # make_compatible methods and RPC client code. # NOTE(xek): The format of this dict is: # { '<release version>': { # 'api': '<Bare Metal API version>', # 'rpc': '<RPC API version>', # 'objects': { # '<object class name>': ['<object version>'], # } # }, # } # The list should contain all objects which are persisted in the database and # sent over RPC. Notifications/Payloads are not being included here since we # don't need to pin them during rolling upgrades. # # For each object, list the versions that the object can be in for a particular # release. That is, any new versions that were added in that release. If there # were no new versions, it should have the same (latest) version as the # previous release. # NOTE(rloo): We need a list, not just the latest version, for the DB queries # that filter for objects that are not in particular versions; for more info, # see comments after L1128 of # https://review.opendev.org/#/c/408556/52/ironic/db/sqlalchemy/api.py. # # There should always be a 'master' entry that reflects the objects in the # master branch. # # Just before doing a release, copy the 'master' entry, and rename the first # 'master' entry to the (semver) version being released. # # Just after doing a named release, delete any entries associated with the # oldest named release. 
RELEASE_MAPPING = { '9.2': { 'rpc': '1.41', 'api': '1.35', 'objects': { 'Node': ['1.21'], 'Conductor': ['1.2'], 'Chassis': ['1.3'], 'Port': ['1.7'], 'Portgroup': ['1.3'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '10.0': { 'api': '1.36', 'rpc': '1.42', 'objects': { 'Node': ['1.22'], 'Conductor': ['1.2'], 'Chassis': ['1.3'], 'Port': ['1.7'], 'Portgroup': ['1.3'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '10.1': { 'api': '1.38', 'rpc': '1.44', 'objects': { 'Node': ['1.23'], 'Conductor': ['1.2'], 'Chassis': ['1.3'], 'Port': ['1.7'], 'Portgroup': ['1.3'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '11.0': { 'api': '1.43', 'rpc': '1.44', 'objects': { 'Node': ['1.25', '1.24'], 'Conductor': ['1.2'], 'Chassis': ['1.3'], 'Port': ['1.8'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '11.1': { 'api': '1.46', 'rpc': '1.47', 'objects': { 'Node': ['1.27', '1.26'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Port': ['1.8'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '12.0': { 'api': '1.49', 'rpc': '1.47', 'objects': { 'Node': ['1.29', '1.28'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Port': ['1.8'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '12.1': { 'api': '1.56', 'rpc': '1.48', 'objects': { 'Allocation': ['1.0'], 'Node': ['1.32', '1.31', '1.30'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.0', '1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '12.2': { 'api': '1.58', 'rpc': '1.48', 'objects': { 'Allocation': ['1.0'], 'Node': ['1.32'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '13.0': { 'api': '1.58', 'rpc': '1.48', 'objects': { 'Allocation': ['1.0'], 'Node': ['1.32'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '14.0': { 'api': '1.61', 'rpc': '1.48', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.33', '1.32'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '15.0': { 'api': '1.65', 'rpc': '1.50', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.34', '1.33', '1.32'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '15.1': { 'api': '1.67', 'rpc': '1.50', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.35', '1.34'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '16.0': { 'api': '1.68', 'rpc': '1.51', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': 
['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '16.1': { 'api': '1.68', 'rpc': '1.51', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.9'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '16.2': { 'api': '1.69', 'rpc': '1.52', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.10'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '17.0': { 'api': '1.72', 'rpc': '1.54', 'objects': { 'Allocation': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.10'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, '18.0': { 'api': '1.74', 'rpc': '1.54', 'objects': { 'Allocation': ['1.1'], 'BIOSSetting': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.10'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, 'master': { 'api': '1.74', 'rpc': '1.54', 'objects': { 'Allocation': ['1.1'], 'BIOSSetting': ['1.1'], 'Node': ['1.35'], 'Conductor': ['1.3'], 'Chassis': ['1.3'], 'Deployment': ['1.0'], 'DeployTemplate': ['1.1'], 'Port': ['1.10'], 'Portgroup': ['1.4'], 'Trait': ['1.0'], 'TraitList': ['1.0'], 'VolumeConnector': ['1.0'], 'VolumeTarget': ['1.0'], } }, } # NOTE(xek): Assign each named release to the appropriate semver. # # Just before we do a new named release (more specifically, create # a stable/<release> branch), add a mapping for the new named # release. This is needed; otherwise CI: a unit test (common. # ReleaseMappingsTestCase.test_contains_current_release_entry()) # and grenade that tests old/new (new-release -> master) will fail. # # Just after we do a new named release, delete the oldest named # release (that we are no longer supporting for a rolling upgrade). # # There should be at most two named mappings here. # NOTE(mgoddard): remove victoria prior to the xena release. RELEASE_MAPPING['victoria'] = RELEASE_MAPPING['16.0'] RELEASE_MAPPING['wallaby'] = RELEASE_MAPPING['17.0'] # List of available versions with named versions first; 'master' is excluded. RELEASE_VERSIONS = sorted(set(RELEASE_MAPPING) - {'master'}, reverse=True) # List of available (version, description) tuples. RELEASE_VERSIONS_DESCS = [(v, _('"%s" release') % v) for v in RELEASE_VERSIONS] def get_object_versions(releases=None, objects=None): """Gets the supported versions for all objects. Supported versions are from the RELEASE_MAPPINGs. :param releases: a list of release names; if empty/None, versions from all releases are returned (the default). :param objects: a list of names of objects of interest. If empty/None, versions of all objects are returned (the default). :returns: a dictionary where the key is the object name and the value is a set of supported versions. 
""" if not releases: releases = list(RELEASE_MAPPING) versions = {} for release in releases: object_mapping = RELEASE_MAPPING[release]['objects'] for obj, version_list in object_mapping.items(): if not objects or obj in objects: versions.setdefault(obj, set()).update(version_list) return versions
apache-2.0
p0cisk/Quantum-GIS
python/plugins/processing/algs/grass7/ext/i_gensigset.py
7
1995
# -*- coding: utf-8 -*-

"""
***************************************************************************
    i_gensigset.py
    --------------
    Date                 : March 2016
    Copyright            : (C) 2016 by Médéric Ribreux
    Email                : medspx at medspx dot fr
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

from __future__ import absolute_import

__author__ = 'Médéric Ribreux'
__date__ = 'March 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'

# This will get replaced with a git SHA1 when you do a git archive

__revision__ = '$Format:%H$'

from .i import regroupRasters, file2Output, moveFile
from os import path
from ..Grass7Utils import Grass7Utils


def processCommand(alg):
    # Transform output files in string parameter
    signatureFile = alg.getOutputFromName('signaturefile')
    origSigFile = signatureFile.value
    shortSigFile = path.basename(origSigFile)
    alg.setOutputValue('signaturefile', shortSigFile)
    signatureFile = file2Output(alg, 'signaturefile')

    # Regroup rasters
    group, subgroup = regroupRasters(alg, 'input', 'group', 'subgroup')

    # Re-add signature files
    alg.addOutput(signatureFile)

    # Find Grass directory
    interSig = path.join(Grass7Utils.grassMapsetFolder(), 'PERMANENT',
                         'group', group, 'subgroup', subgroup,
                         'sigset', shortSigFile)
    moveFile(alg, interSig, origSigFile)
    alg.setOutputValue('signaturefile', origSigFile)
gpl-2.0
childresslab/MicrocavityExp1
gui/manager/managergui.py
1
25022
# -*- coding: utf-8 -*- """ This module contains a GUI through which the Manager core class can be controlled. It can load and reload modules, show the configuration, and re-open closed windows. Qudi is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Qudi is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Qudi. If not, see <http://www.gnu.org/licenses/>. Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/> """ import core.logger import logging import numpy as np import os from collections import OrderedDict from core.module import StatusVar from .errordialog import ErrorDialog from gui.guibase import GUIBase from qtpy import QtCore, QtWidgets, uic from qtpy.QtGui import QPalette from qtpy.QtWidgets import QWidget try: from qtconsole.inprocess import QtInProcessKernelManager except ImportError: from IPython.qt.inprocess import QtInProcessKernelManager try: from git import Repo except: pass try: import pyqtgraph as pg _has_pyqtgraph = True except: _has_pyqtgraph = False # Rather than import the ui*.py file here, the ui*.ui file itself is # loaded by uic.loadUI in the QtGui classes below. class ManagerGui(GUIBase): """This class provides a GUI to the Qudi manager. @signal sigStartAll: sent when all modules should be loaded @signal str str sigStartThis: load a specific module @signal str str sigReloadThis reload a specific module from Python code @signal str str sigStopThis: stop all actions of a module and remove references It supports module loading, reloading, logging and other administrative tasks. """ # status vars consoleFontSize = StatusVar('console_font_size', 10) # signals sigStartAll = QtCore.Signal() sigStartModule = QtCore.Signal(str, str) sigReloadModule = QtCore.Signal(str, str) sigCleanupStatus = QtCore.Signal(str, str) sigStopModule = QtCore.Signal(str, str) sigLoadConfig = QtCore.Signal(str, bool) sigSaveConfig = QtCore.Signal(str) sigRealQuit = QtCore.Signal() def __init__(self, **kwargs): """Create an instance of the module. @param object manager: @param str name: @param dict config: """ super().__init__(**kwargs) self.modlist = list() self.modules = set() def on_activate(self): """ Activation method called on change to active state. This method creates the Manager main window. 
""" if _has_pyqtgraph: # set background of pyqtgraph testwidget = QWidget() testwidget.ensurePolished() bgcolor = testwidget.palette().color(QPalette.Normal, testwidget.backgroundRole()) # set manually the background color in hex code according to our # color scheme: pg.setConfigOption('background', bgcolor) # opengl usage if 'useOpenGL' in self._manager.tree['global']: pg.setConfigOption('useOpenGL', self._manager.tree['global']['useOpenGL']) self._mw = ManagerMainWindow() self.restoreWindowPos(self._mw) self.errorDialog = ErrorDialog(self) self._about = AboutDialog() version = self.getSoftwareVersion() configFile = self._manager.configFile self._about.label.setText( '<a href=\"https://github.com/Ulm-IQO/qudi/commit/{0}\"' ' style=\"color: cyan;\"> {0} </a>, on branch {1}.'.format( version[0], version[1])) self.versionLabel = QtWidgets.QLabel() self.versionLabel.setText( '<a href=\"https://github.com/Ulm-IQO/qudi/commit/{0}\"' ' style=\"color: cyan;\"> {0} </a>,' ' on branch {1}, configured from {2}'.format( version[0], version[1], configFile)) self.versionLabel.setOpenExternalLinks(True) self._mw.statusBar().addWidget(self.versionLabel) # Connect up the buttons. self._mw.actionQuit.triggered.connect(self._manager.quit) self._mw.actionLoad_configuration.triggered.connect(self.getLoadFile) self._mw.actionReload_current_configuration.triggered.connect(self.reloadConfig) self._mw.actionSave_configuration.triggered.connect(self.getSaveFile) self._mw.action_Load_all_modules.triggered.connect(self._manager.startAllConfiguredModules) self._mw.actionAbout_Qt.triggered.connect(QtWidgets.QApplication.aboutQt) self._mw.actionAbout_Qudi.triggered.connect(self.showAboutQudi) self._mw.actionReset_to_default_layout.triggered.connect(self.resetToDefaultLayout) self._manager.sigShowManager.connect(self.show) self._manager.sigConfigChanged.connect(self.updateConfigWidgets) self._manager.sigModulesChanged.connect(self.updateConfigWidgets) self._manager.sigShutdownAcknowledge.connect(self.promptForShutdown) # Log widget self._mw.logwidget.setManager(self._manager) for loghandler in logging.getLogger().handlers: if isinstance(loghandler, core.logger.QtLogHandler): loghandler.sigLoggedMessage.connect(self.handleLogEntry) # Module widgets self.sigStartModule.connect(self._manager.startModule) self.sigReloadModule.connect(self._manager.restartModuleRecursive) self.sigCleanupStatus.connect(self._manager.removeStatusFile) self.sigStopModule.connect(self._manager.deactivateModule) self.sigLoadConfig.connect(self._manager.loadConfig) self.sigSaveConfig.connect(self._manager.saveConfig) self.sigRealQuit.connect(self._manager.realQuit) # Module state display self.checkTimer = QtCore.QTimer() self.checkTimer.start(1000) self.updateGUIModuleList() # IPython console widget self.startIPython() self.updateIPythonModuleList() self.startIPythonWidget() # thread widget self._mw.threadWidget.threadListView.setModel(self._manager.tm) # remote widget self._mw.remoteWidget.hostLabel.setText('URL:') self._mw.remoteWidget.portLabel.setText( 'rpyc://{0}:{1}/'.format(self._manager.rm.host, self._manager.rm.server.port)) self._mw.remoteWidget.remoteModuleListView.setModel( self._manager.rm.remoteModules) self._mw.remoteWidget.sharedModuleListView.setModel( self._manager.rm.sharedModules) self._mw.configDisplayDockWidget.hide() self._mw.remoteDockWidget.hide() self._mw.threadDockWidget.hide() self._mw.show() def on_deactivate(self): """Close window and remove connections. 
""" self.stopIPythonWidget() self.stopIPython() self.checkTimer.stop() if len(self.modlist) > 0: self.checkTimer.timeout.disconnect() self.sigStartModule.disconnect() self.sigReloadModule.disconnect() self.sigStopModule.disconnect() self.sigLoadConfig.disconnect() self.sigSaveConfig.disconnect() self._mw.actionQuit.triggered.disconnect() self._mw.actionLoad_configuration.triggered.disconnect() self._mw.actionSave_configuration.triggered.disconnect() self._mw.action_Load_all_modules.triggered.disconnect() self._mw.actionAbout_Qt.triggered.disconnect() self._mw.actionAbout_Qudi.triggered.disconnect() self.saveWindowPos(self._mw) self._mw.close() def show(self): """Show the window and bring it t the top. """ QtWidgets.QMainWindow.show(self._mw) self._mw.activateWindow() self._mw.raise_() def showAboutQudi(self): """Show a dialog with details about Qudi. """ self._about.show() @QtCore.Slot(bool, bool) def promptForShutdown(self, locked, broken): """ Display a dialog, asking the user to confirm shutdown. """ text = "Some modules are locked right now, really quit?" result = QtWidgets.QMessageBox.question( self._mw, 'Qudi: Really Quit?', text, QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No ) if result == QtWidgets.QMessageBox.Yes: self.sigRealQuit.emit() def resetToDefaultLayout(self): """ Return the dockwidget layout and visibility to its default state """ self._mw.configDisplayDockWidget.setVisible(False) self._mw.consoleDockWidget.setVisible(True) self._mw.remoteDockWidget.setVisible(False) self._mw.threadDockWidget.setVisible(False) self._mw.logDockWidget.setVisible(True) self._mw.actionConfigurationView.setChecked(False) self._mw.actionConsoleView.setChecked(True) self._mw.actionRemoteView.setChecked(False) self._mw.actionThreadsView.setChecked(False) self._mw.actionLogView.setChecked(True) self._mw.configDisplayDockWidget.setFloating(False) self._mw.consoleDockWidget.setFloating(False) self._mw.remoteDockWidget.setFloating(False) self._mw.threadDockWidget.setFloating(False) self._mw.logDockWidget.setFloating(False) self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.configDisplayDockWidget) self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(2), self._mw.consoleDockWidget) self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.remoteDockWidget) self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.threadDockWidget) self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.logDockWidget) def handleLogEntry(self, entry): """ Forward log entry to log widget and show an error popup if it is an error message. @param dict entry: Log entry """ self._mw.logwidget.addEntry(entry) if entry['level'] == 'error' or entry['level'] == 'critical': self.errorDialog.show(entry) def startIPython(self): """ Create an IPython kernel manager and kernel. Add modules to its namespace. 
""" # make sure we only log errors and above from ipython logging.getLogger('ipykernel').setLevel(logging.WARNING) self.log.debug('IPy activation in thread {0}'.format( QtCore.QThread.currentThreadId())) self.kernel_manager = QtInProcessKernelManager() self.kernel_manager.start_kernel() self.kernel = self.kernel_manager.kernel self.namespace = self.kernel.shell.user_ns self.namespace.update({ 'np': np, 'config': self._manager.tree['defined'], 'manager': self._manager }) if _has_pyqtgraph: self.namespace['pg'] = pg self.updateIPythonModuleList() self.kernel.gui = 'qt4' self.log.info('IPython has kernel {0}'.format( self.kernel_manager.has_kernel)) self.log.info('IPython kernel alive {0}'.format( self.kernel_manager.is_alive())) self._manager.sigModulesChanged.connect(self.updateIPythonModuleList) def startIPythonWidget(self): """ Create an IPython console widget and connect it to an IPython kernel. """ if (_has_pyqtgraph): banner_modules = 'The numpy and pyqtgraph modules have already ' \ 'been imported as ''np'' and ''pg''.' else: banner_modules = 'The numpy module has already been imported ' \ 'as ''np''.' banner = """ This is an interactive IPython console. {0} Configuration is in 'config', the manager is 'manager' and all loaded modules are in this namespace with their configured name. View the current namespace with dir(). Go, play. """.format(banner_modules) self._mw.consolewidget.banner = banner # font size self.consoleSetFontSize(self.consoleFontSize) # settings self._csd = ConsoleSettingsDialog() self._csd.accepted.connect(self.consoleApplySettings) self._csd.rejected.connect(self.consoleKeepSettings) self._csd.buttonBox.button( QtWidgets.QDialogButtonBox.Apply).clicked.connect( self.consoleApplySettings) self._mw.actionConsoleSettings.triggered.connect(self._csd.exec_) self.consoleKeepSettings() self._mw.consolewidget.kernel_manager = self.kernel_manager self._mw.consolewidget.kernel_client = \ self._mw.consolewidget.kernel_manager.client() self._mw.consolewidget.kernel_client.start_channels() # the linux style theme which is basically the monokai theme self._mw.consolewidget.set_default_style(colors='linux') def stopIPython(self): """ Stop the IPython kernel. """ self.log.debug('IPy deactivation: {0}'.format(QtCore.QThread.currentThreadId())) self.kernel_manager.shutdown_kernel() def stopIPythonWidget(self): """ Disconnect the IPython widget from the kernel. """ self._mw.consolewidget.kernel_client.stop_channels() def updateIPythonModuleList(self): """Remove non-existing modules from namespace, add new modules to namespace, update reloaded modules """ currentModules = set() newNamespace = dict() for base in ['hardware', 'logic', 'gui']: for module in self._manager.tree['loaded'][base]: currentModules.add(module) newNamespace[module] = self._manager.tree[ 'loaded'][base][module] discard = self.modules - currentModules self.namespace.update(newNamespace) for module in discard: self.namespace.pop(module, None) self.modules = currentModules def consoleKeepSettings(self): """ Write old values into config dialog. """ self._csd.fontSizeBox.setProperty('value', self.consoleFontSize) def consoleApplySettings(self): """ Apply values from config dialog to console. """ self.consoleSetFontSize(self._csd.fontSizeBox.value()) def consoleSetFontSize(self, fontsize): self._mw.consolewidget.font_size = fontsize self.consoleFontSize = fontsize self._mw.consolewidget.reset_font() def updateConfigWidgets(self): """ Clear and refill the tree widget showing the configuration. 
""" self.fillTreeWidget(self._mw.treeWidget, self._manager.tree) def updateGUIModuleList(self): """ Clear and refill the module list widget """ # self.clearModuleList(self) self.fillModuleList(self._mw.guilayout, 'gui') self.fillModuleList(self._mw.logiclayout, 'logic') self.fillModuleList(self._mw.hwlayout, 'hardware') def fillModuleList(self, layout, base): """ Fill the module list widget with module widgets for defined gui modules. @param QLayout layout: layout of th module list widget where module widgest should be addad @param str base: module category to fill """ for module in self._manager.tree['defined'][base]: if not module in self._manager.tree['global']['startup']: widget = ModuleListItem(self._manager, base, module) self.modlist.append(widget) layout.addWidget(widget) widget.sigLoadThis.connect(self.sigStartModule) widget.sigReloadThis.connect(self.sigReloadModule) widget.sigDeactivateThis.connect(self.sigStopModule) widget.sigCleanupStatus.connect(self.sigCleanupStatus) self.checkTimer.timeout.connect(widget.checkModuleState) def fillTreeItem(self, item, value): """ Recursively fill a QTreeWidgeItem with the contents from a dictionary. @param QTreeWidgetItem item: the widget item to fill @param (dict, list, etc) value: value to fill in """ item.setExpanded(True) if type(value) is OrderedDict or type(value) is dict: for key in value: child = QtWidgets.QTreeWidgetItem() child.setText(0, key) item.addChild(child) self.fillTreeItem(child, value[key]) elif type(value) is list: for val in value: child = QtWidgets.QTreeWidgetItem() item.addChild(child) if type(val) is dict: child.setText(0, '[dict]') self.fillTreeItem(child, val) elif type(val) is OrderedDict: child.setText(0, '[odict]') self.fillTreeItem(child, val) elif type(val) is list: child.setText(0, '[list]') self.fillTreeItem(child, val) else: child.setText(0, str(val)) child.setExpanded(True) else: child = QtWidgets.QTreeWidgetItem() child.setText(0, str(value)) item.addChild(child) def getSoftwareVersion(self): """ Try to determine the software version in case the program is in a git repository. """ try: repo = Repo(self.get_main_dir()) branch = repo.active_branch rev = str(repo.head.commit) return (rev, str(branch)) except Exception as e: print('Could not get git repo because:', e) return ('unknown', -1) def fillTreeWidget(self, widget, value): """ Fill a QTreeWidget with the content of a dictionary @param QTreeWidget widget: the tree widget to fill @param dict,OrderedDict value: the dictionary to fill in """ widget.clear() self.fillTreeItem(widget.invisibleRootItem(), value) def reloadConfig(self): """ Reload the current config. 
""" reply = QtWidgets.QMessageBox.question( self._mw, 'Restart', 'Do you want to restart the current configuration?', QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No ) configFile = self._manager._getConfigFile() restart = (reply == QtWidgets.QMessageBox.Yes) self.sigLoadConfig.emit(configFile, restart) def getLoadFile(self): """ Ask the user for a file where the configuration should be loaded from """ defaultconfigpath = os.path.join(self.get_main_dir(), 'config') filename = QtWidgets.QFileDialog.getOpenFileName( self._mw, 'Load Configration', defaultconfigpath, 'Configuration files (*.cfg)')[0] if filename != '': reply = QtWidgets.QMessageBox.question( self._mw, 'Restart', 'Do you want to restart to use the configuration?', QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No ) restart = (reply == QtWidgets.QMessageBox.Yes) self.sigLoadConfig.emit(filename, restart) def getSaveFile(self): """ Ask the user for a file where the configuration should be saved to. """ defaultconfigpath = os.path.join(self.get_main_dir(), 'config') filename = QtWidgets.QFileDialog.getSaveFileName( self._mw, 'Save Configration', defaultconfigpath, 'Configuration files (*.cfg)')[0] if filename != '': self.sigSaveConfig.emit(filename) class ManagerMainWindow(QtWidgets.QMainWindow): """ This class represents the Manager Window. """ def __init__(self): """ Create the Manager Window. """ # Get the path to the *.ui file this_dir = os.path.dirname(__file__) ui_file = os.path.join(this_dir, 'ui_manager_window.ui') # Load it super(ManagerMainWindow, self).__init__() uic.loadUi(ui_file, self) self.show() # Set up the layout # this really cannot be done in Qt designer, you cannot set a layout # on an empty widget self.guilayout = QtWidgets.QVBoxLayout(self.guiscroll) self.logiclayout = QtWidgets.QVBoxLayout(self.logicscroll) self.hwlayout = QtWidgets.QVBoxLayout(self.hwscroll) class AboutDialog(QtWidgets.QDialog): """ This class represents the Qudi About dialog. """ def __init__(self): """ Create Qudi About Dialog. """ # Get the path to the *.ui file this_dir = os.path.dirname(__file__) ui_file = os.path.join(this_dir, 'ui_about.ui') # Load it super().__init__() uic.loadUi(ui_file, self) class ConsoleSettingsDialog(QtWidgets.QDialog): """ Create the SettingsDialog window, based on the corresponding *.ui file. """ def __init__(self): # Get the path to the *.ui file this_dir = os.path.dirname(__file__) ui_file = os.path.join(this_dir, 'ui_console_settings.ui') # Load it super().__init__() uic.loadUi(ui_file, self) class ModuleListItem(QtWidgets.QFrame): """ This class represents a module widget in the Qudi module list. @signal str str sigLoadThis: gives signal with base and name of module to be loaded @signal str str sigReloadThis: gives signal with base and name of module to be reloaded @signal str str sigStopThis: gives signal with base and name of module to be deactivated """ sigLoadThis = QtCore.Signal(str, str) sigReloadThis = QtCore.Signal(str, str) sigDeactivateThis = QtCore.Signal(str, str) sigCleanupStatus = QtCore.Signal(str, str) def __init__(self, manager, basename, modulename): """ Create a module widget. 
@param str basename: module category @param str modulename: unique module name """ # Get the path to the *.ui file this_dir = os.path.dirname(__file__) ui_file = os.path.join(this_dir, 'ui_module_widget.ui') # Load it super().__init__() uic.loadUi(ui_file, self) self.manager = manager self.name = modulename self.base = basename self.loadButton.setText('Load {0}'.format(self.name)) # connect buttons self.loadButton.clicked.connect(self.loadButtonClicked) self.reloadButton.clicked.connect(self.reloadButtonClicked) self.deactivateButton.clicked.connect(self.deactivateButtonClicked) self.cleanupButton.clicked.connect(self.cleanupButtonClicked) def loadButtonClicked(self): """ Send signal to load and activate this module. """ self.sigLoadThis.emit(self.base, self.name) if self.base == 'gui': self.loadButton.setText('Show {0}'.format(self.name)) def reloadButtonClicked(self): """ Send signal to reload this module. """ self.sigReloadThis.emit(self.base, self.name) def deactivateButtonClicked(self): """ Send signal to deactivate this module. """ self.sigDeactivateThis.emit(self.base, self.name) def cleanupButtonClicked(self): """ Send signal to deactivate this module. """ self.sigCleanupStatus.emit(self.base, self.name) def checkModuleState(self): """ Get the state of this module and display it in the statusLabel """ state = '' if self.statusLabel.text() != 'exception, cannot get state': try: if (self.base in self.manager.tree['loaded'] and self.name in self.manager.tree['loaded'][self.base]): state = self.manager.tree['loaded'][self.base][self.name].getState() else: state = 'not loaded' except: state = 'exception, cannot get state' self.statusLabel.setText(state)
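The configuration viewer in the module above relies on a simple recursion over nested dicts and lists (fillTreeItem). A console-only analogue of that traversal, offered to make the recursion easier to follow (no Qt required; the sample data is invented, merely shaped like a Qudi config tree):

def print_tree(value, indent=0):
    """Console analogue of ManagerGui.fillTreeItem: walk dicts/lists recursively."""
    pad = "  " * indent
    if isinstance(value, dict):
        for key, val in value.items():
            print(pad + str(key))
            print_tree(val, indent + 1)
    elif isinstance(value, list):
        for val in value:
            if isinstance(val, (dict, list)):
                print(pad + ("[dict]" if isinstance(val, dict) else "[list]"))
                print_tree(val, indent + 1)
            else:
                print(pad + str(val))
    else:
        print(pad + str(value))


print_tree({"gui": {"manager": {"module": "managergui"}},
            "startup": ["manager", "tray"]})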
gpl-3.0
meteorcloudy/tensorflow
tensorflow/contrib/all_reduce/__init__.py
38
1466
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""All-reduce implementations."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.all_reduce.python.all_reduce import *

from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import

_allowed_symbols = [
    'build_ring_all_reduce',
    'build_recursive_hd_all_reduce',
    'build_shuffle_all_reduce',
    'build_nccl_all_reduce',
    'build_nccl_then_ring',
    'build_nccl_then_recursive_hd',
    'build_nccl_then_shuffle',
    'build_shuffle_then_ring',
    'build_shuffle_then_shuffle'
]

remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
apache-2.0
soylentdeen/BlurryApple
Tools/Gendron/gral_zer.py
2
7113
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 24 16:52:46 2014

Mix of various routines around Zernike modes

@author: tristanbuey
"""

# Load libraries
import numpy as np;


def gammX(n, i0):
    """
    gammX(n, i0)

    Computes Noll matrix for derivatives of Zernike Gx.
    The matrix applies on a Zernike vector z, and produces the Zernike
    decomposition z' of x-derivative :
    z' = Gx . z
    n = number of zernike coefficients on the input vector z.
    i0 = zernike index of the first coefficient of z (1=piston, 2=tip, ...)
    It results that Gx is a matrix with a size (n+i0-1, n).
    """
    gg = np.zeros((i0+n-1,n));
    # variable i will span Zernike indexes, starting at i0.
    for i in range(i0, i0+n):
        # variable j spans Zernike indexes, starting at piston
        # and stopping at i
        for j in range(1, i+1):
            gg[j-1,i-i0] = gamX(i,j);
    return gg;


def gammY(n, i0):
    """
    gammY(n, i0)

    Computes Noll matrix for derivatives of Zernike Gy.
    The matrix applies on a Zernike vector z, and produces the Zernike
    decomposition z' of y-derivative :
    z' = Gy . z
    n = number of zernike coefficients on the input vector z.
    i0 = zernike index of the first coefficient of z (1=piston, 2=tip, ...)
    It results that Gy is a matrix with a size (n+i0-1, n).
    """
    gg = np.zeros((i0+n-1,n));
    # variable i will span Zernike indexes, starting at i0.
    for i in range(i0, i0+n):
        # variable j spans Zernike indexes, starting at piston
        # and stopping at i
        for j in range(1, i+1):
            gg[j-1,i-i0] = gamY(i,j);
    return gg;


"""
A lot of sub-functions to calculate the Noll matrix
"""


def pair(number):
    return number % 2 == 0;


def impair(num):
    return num % 2 != 0;


def nm(i):
    """
    For a given Zernike mode of index <i>, returns the radial and
    azimutal orders (n,m)
    """
    n = int( (-1.+np.sqrt(8*(i-1)+1))/2.);
    p = (i-(n*(n+1))/2);
    k = n%2;
    m = int((p+k)/2)*2 - k;
    return (n,m);


def gamY(i,j):
    """
    Input arguments: 2 scalar int i and j, that are indexes of Zernike modes.
    Returns the coefficient of the derivative matrix of (Noll R.J., 1976)

    The algorithm coded below is a python translation of the series of rules
    that Noll has enounced in his article of 1976, for derivating Zernike.
    Warning: Unfortunately Noll had made a little error in his rules, that has
    been corrected in this program.
    """
    # determine radial and azimutal orders of Zernike number i
    ni,mi = nm(i);
    # idem for j
    n,m = nm(j);
    # Noll's rules :
    if(mi==(m-1) or mi==(m+1)):
        if(m==0 or mi==0):
            if((m==0 and impair(i)) or (mi==0 and impair(j))):
                return np.sqrt(2*(n+1)*(ni+1));
            else:
                return 0.00;
        else:
            if(impair(i+j)):
                if((mi==m+1 and impair(j)) or (mi==m-1 and pair(j))):
                    return -np.sqrt((n+1)*(ni+1));
                else:
                    return np.sqrt((n+1)*(ni+1));
            else:
                return 0.0;
    else:
        return 0.0;
    return;


def gamX(i,j):
    """
    Input arguments: 2 scalar int i and j, that are indexes of Zernike modes.
    Returns the coefficient of the derivative matrix of (Noll R.J., 1976)

    The algorithm coded below is a python translation of the series of rules
    that Noll has enounced in his article of 1976, for derivating Zernike.
    Warning: Unfortunately Noll had made a little error in his rules, that has
    been corrected in this program.
    """
    # determine radial and azimutal orders of Zernike number i
    ni,mi = nm(i);
    # idem for j
    n,m = nm(j);
    # Noll's rules :
    if(mi==m-1 or mi==m+1):
        if(m==0 or mi==0):
            if((m==0 and pair(i)) or (mi==0 and pair(j))):
                return np.sqrt(2*(n+1)*(ni+1));
            else:
                return 0.00;
        else:
            if( (j+i)%2==0 ):
                return np.sqrt((n+1)*(ni+1));
            else:
                return 0.00;
    else:
        return 0.0;
    return;


def polyfute(m,n):
    """
    The coefficients of the Zernike polynomials are the K_mn(s).
    The coefficient K_mn(s) weights r^(n-2s).
    They satisfy the recurrence relation
    K_mn(s+1) = K_mn(s) * ((n+m)/2-s)*((n-m)/2-s)/(s+1)/(n-s)
    together with
    K_mn(0) = n! / ((n+m)/2)! / ((n-m)/2)!
    """
    a = np.zeros(n+1)

    # Compute K_mn(0)
    st = 2    # start index for dividing by ((n-m)/2)!
    coef = 1.00
    for i in range((n+m)/2+1, n+1):
        if( st<=((n-m)/2) and i%st==0 ) :
            j = i/st
            st = st+1
            coef = coef*j
        else:
            coef = coef*i

    # division by ((n-m)/2)!  (has already been partially done)
    for i in range(st,(n-m)/2+1):
        coef = coef / i

    a[n] = round(coef);   # for K_nm(0)

    for i in range(1,(n-m)/2+1):
        coef = -coef * ((n+m)/2-i+1)*((n-m)/2-i+1);
        coef = coef / i;
        coef = coef / (n-i+1);
        a[n-2*i] = round(coef)

    return a


def evaluate_poly(n,m,a,r):
    """
    evaluate_poly(n,m,a,r)
    n is the radial order
    m is the azimutal order
    a[] is the list of coefficient, with a(i+1) the coeff of r^i
    r is the variable of the polynomial
    """
    if n>1 :
        r2 = r*r
    p = a[n]
    for i in range(n-2,m-1,-2):
        p = p*r2 + a[i]
    if(m==0):
        return p
    elif(m==1):
        p*=r
    elif(m==2):
        p*=r2
    else:
        p = p * r**m
    return p


def zer(r,t,i):
    """
    Computes Zernike polynom of index i, at point (r,t)

    The algo is using
    1) a recursive function to compute coefficients of the polynom, so that
       there is no factorial function of large numbers to invoke (risk of
       roundoff errors, plus takes more exec time)
    2) a smarter way to compute polynomial expressions such as
       ax^3+bx^2+cx+d = x(x(ax+b)+c)+d
       to avoid roundoff errors and minimize number of operations
    """
    if(i==1):
        return np.ones_like(r+t)
    # compute n and m from i
    n = int( (-1.+np.sqrt(8*(i-1)+1))/2.)
    p = (i-(n*(n+1))/2);
    k = n%2;
    m = int((p+k)/2)*2 - k;
    a = polyfute(m,n)
    Z = evaluate_poly(n,m,a,r) * np.sqrt(n+1);
    if( m!=0 ):
        Z *= np.sqrt(2);
        if( i%2 ):
            Z *= np.sin(m*t)
        else:
            Z *= np.cos(m*t)
    return Z


"""
Defines a meshgrid of npt X npt points, and express it in polar coordinates.
Returns a tuple (r,theta).
"""
def mkxy(npt, center):
    # generate an array of coordinates
    if center==1:
        x = np.linspace(-1,1,npt+1)[0:npt]
    else:
        x = np.linspace(-1,1,npt)
    x,y = np.meshgrid(x,x)
    # generates a map of the distance of subapertures to pupil center
    r = np.sqrt(x**2 + y**2)
    # generates a map of the azimut angle of subapertures
    theta = np.arctan2(y,x)
    return r,theta
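A brief usage sketch of the routines above. The grid size and mode count are arbitrary choices, and the call into zer() assumes the Python 2 division semantics the file was written for (polyfute relies on integer division with '/'):

import numpy as np

# 64x64 polar grid over the unit square; keep only points inside the unit pupil.
r, theta = mkxy(64, 0)
pupil = (r <= 1.0)
defocus = zer(r, theta, 4) * pupil   # Noll mode 4 = defocus, sqrt(3)*(2r^2 - 1)

# Noll derivative matrix for 10 modes starting at tip (i0=2): shape (11, 10).
Gx = gammX(10, 2)
coeffs = np.zeros(10)
coeffs[2] = 1.0                      # put all the signal on Z4 (focus)
dx_modes = Gx.dot(coeffs)            # Zernike decomposition of the x-derivative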
gpl-2.0
lisael/pg-django
django/core/files/base.py
78
3910
import os

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.utils.encoding import smart_str, smart_unicode
from django.core.files.utils import FileProxyMixin


class File(FileProxyMixin):
    DEFAULT_CHUNK_SIZE = 64 * 2**10

    def __init__(self, file, name=None):
        self.file = file
        if name is None:
            name = getattr(file, 'name', None)
        self.name = name
        self.mode = getattr(file, 'mode', None)

    def __str__(self):
        return smart_str(self.name or '')

    def __unicode__(self):
        return smart_unicode(self.name or u'')

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self or "None")

    def __nonzero__(self):
        return bool(self.name)

    def __len__(self):
        return self.size

    def _get_size(self):
        if not hasattr(self, '_size'):
            if hasattr(self.file, 'size'):
                self._size = self.file.size
            elif os.path.exists(self.file.name):
                self._size = os.path.getsize(self.file.name)
            else:
                raise AttributeError("Unable to determine the file's size.")
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_closed(self):
        return not self.file or self.file.closed
    closed = property(_get_closed)

    def chunks(self, chunk_size=None):
        """
        Read the file and yield chucks of ``chunk_size`` bytes (defaults to
        ``UploadedFile.DEFAULT_CHUNK_SIZE``).
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE

        if hasattr(self, 'seek'):
            self.seek(0)
        # Assume the pointer is at zero...
        counter = self.size

        while counter > 0:
            yield self.read(chunk_size)
            counter -= chunk_size

    def multiple_chunks(self, chunk_size=None):
        """
        Returns ``True`` if you can expect multiple chunks.

        NB: If a particular file representation is in memory, subclasses should
        always return ``False`` -- there's no good reason to read from memory
        in chunks.
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE
        return self.size > chunk_size

    def __iter__(self):
        # Iterate over this file-like object by newlines
        buffer_ = None
        for chunk in self.chunks():
            chunk_buffer = StringIO(chunk)

            for line in chunk_buffer:
                if buffer_:
                    line = buffer_ + line
                    buffer_ = None

                # If this is the end of a line, yield
                # otherwise, wait for the next round
                if line[-1] in ('\n', '\r'):
                    yield line
                else:
                    buffer_ = line

        if buffer_ is not None:
            yield buffer_

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()

    def open(self, mode=None):
        if not self.closed:
            self.seek(0)
        elif self.name and os.path.exists(self.name):
            self.file = open(self.name, mode or self.mode)
        else:
            raise ValueError("The file cannot be reopened.")

    def close(self):
        self.file.close()


class ContentFile(File):
    """
    A File-like object that takes just raw content, rather than an actual file.
    """
    def __init__(self, content, name=None):
        content = content or ''
        super(ContentFile, self).__init__(StringIO(content), name=name)
        self.size = len(content)

    def __str__(self):
        return 'Raw content'

    def __nonzero__(self):
        return True

    def open(self, mode=None):
        self.seek(0)

    def close(self):
        pass
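A brief, hedged usage sketch of the two wrappers above (the on-disk path is hypothetical, and the snippet targets the same Python 2 era as the module):

from django.core.files.base import ContentFile, File

# Wrap raw content without touching the filesystem.
memory_file = ContentFile("hello world", name="greeting.txt")
print(memory_file.size)               # 11
print(memory_file.multiple_chunks())  # False -> safe to read in one go

# Wrap an already-open file object and stream it in bounded chunks.
with open("/tmp/example.bin", "rb") as fh:   # hypothetical path
    wrapped = File(fh)
    for chunk in wrapped.chunks(chunk_size=4096):
        pass  # e.g. hash or copy each chunk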
bsd-3-clause
balloob/home-assistant
homeassistant/components/locative/__init__.py
10
4327
"""Support for Locative.""" import logging from typing import Dict from aiohttp import web import voluptuous as vol from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER from homeassistant.const import ( ATTR_ID, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_WEBHOOK_ID, HTTP_OK, HTTP_UNPROCESSABLE_ENTITY, STATE_NOT_HOME, ) from homeassistant.helpers import config_entry_flow import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send _LOGGER = logging.getLogger(__name__) DOMAIN = "locative" TRACKER_UPDATE = f"{DOMAIN}_tracker_update" ATTR_DEVICE_ID = "device" ATTR_TRIGGER = "trigger" def _id(value: str) -> str: """Coerce id by removing '-'.""" return value.replace("-", "") def _validate_test_mode(obj: Dict) -> Dict: """Validate that id is provided outside of test mode.""" if ATTR_ID not in obj and obj[ATTR_TRIGGER] != "test": raise vol.Invalid("Location id not specified") return obj WEBHOOK_SCHEMA = vol.All( vol.Schema( { vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Required(ATTR_DEVICE_ID): cv.string, vol.Required(ATTR_TRIGGER): cv.string, vol.Optional(ATTR_ID): vol.All(cv.string, _id), }, extra=vol.ALLOW_EXTRA, ), _validate_test_mode, ) async def async_setup(hass, hass_config): """Set up the Locative component.""" hass.data[DOMAIN] = {"devices": set(), "unsub_device_tracker": {}} return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Locative.""" try: data = WEBHOOK_SCHEMA(dict(await request.post())) except vol.MultipleInvalid as error: return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY) device = data[ATTR_DEVICE_ID] location_name = data.get(ATTR_ID, data[ATTR_TRIGGER]).lower() direction = data[ATTR_TRIGGER] gps_location = (data[ATTR_LATITUDE], data[ATTR_LONGITUDE]) if direction == "enter": async_dispatcher_send(hass, TRACKER_UPDATE, device, gps_location, location_name) return web.Response(text=f"Setting location to {location_name}", status=HTTP_OK) if direction == "exit": current_state = hass.states.get(f"{DEVICE_TRACKER}.{device}") if current_state is None or current_state.state == location_name: location_name = STATE_NOT_HOME async_dispatcher_send( hass, TRACKER_UPDATE, device, gps_location, location_name ) return web.Response(text="Setting location to not home", status=HTTP_OK) # Ignore the message if it is telling us to exit a zone that we # aren't currently in. This occurs when a zone is entered # before the previous zone was exited. The enter message will # be sent first, then the exit message will be sent second. return web.Response( text=f"Ignoring exit from {location_name} (already in {current_state})", status=HTTP_OK, ) if direction == "test": # In the app, a test message can be sent. Just return something to # the user to let them know that it works. 
return web.Response(text="Received test message.", status=HTTP_OK) _LOGGER.error("Received unidentified message from Locative: %s", direction) return web.Response( text=f"Received unidentified message: {direction}", status=HTTP_UNPROCESSABLE_ENTITY, ) async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, "Locative", entry.data[CONF_WEBHOOK_ID], handle_webhook ) hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER) ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)() return await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER) async_remove_entry = config_entry_flow.webhook_async_remove_entry
apache-2.0
googleads/googleads-python-lib
examples/ad_manager/v202011/activity_group_service/get_active_activity_groups.py
1
1957
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This example gets all active activity groups.
"""

# Import appropriate modules from the client library.
from googleads import ad_manager


def main(client):
  # Initialize appropriate service.
  activity_group_service = client.GetService(
      'ActivityGroupService', version='v202011')

  # Create a statement to select activity groups.
  statement = (ad_manager.StatementBuilder(version='v202011')
               .Where('status = :status')
               .WithBindVariable('status', 'ACTIVE'))

  # Retrieve a small amount of activity groups at a time, paging
  # through until all activity groups have been retrieved.
  while True:
    response = activity_group_service.getActivityGroupsByStatement(
        statement.ToStatement())
    if 'results' in response and len(response['results']):
      for activity_group in response['results']:
        # Print out some information for each activity group.
        print('Activity group with ID "%d" and name "%s" was found.\n' %
              (activity_group['id'], activity_group['name']))
      statement.offset += statement.limit
    else:
      break

  print('\nNumber of results found: %s' % response['totalResultSetSize'])


if __name__ == '__main__':
  # Initialize client object.
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client)
apache-2.0
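If the default ~/googleads.yaml location is not convenient, the client can be loaded from an explicit path instead; the path below is a placeholder.

from googleads import ad_manager

# Hypothetical path; LoadFromStorage() with no argument reads ~/googleads.yaml.
client = ad_manager.AdManagerClient.LoadFromStorage('/etc/adsapi/googleads.yaml')
main(client)  # reuses main() from the example above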
macioosch/dynamo-hard-spheres-sim
convergence-plot.py
1
6346
#!/usr/bin/env python2 # encoding=utf-8 from __future__ import division, print_function from glob import glob from itertools import izip from matplotlib import pyplot as plt import numpy as np input_files = glob("csv/convergence-256000-0.*.csv") #input_files = glob("csv/convergence-500000-0.*.csv") #input_files = glob("csv/convergence-1000188-0.*.csv") #plotted_parameter = "msds_diffusion" plotted_parameter = "pressures_collision" #plotted_parameter = "pressures_virial" #plotted_parameter = "msds_val" #plotted_parameter = "times" legend_names = [] tight_layout = False show_legend = False for file_number, file_name in enumerate(sorted(input_files)): data = np.genfromtxt(file_name, delimiter='\t', names=[ "packings","densities","collisions","n_atoms","pressures_virial", "pressures_collision","msds_val","msds_diffusion","times", "std_pressures_virial","std_pressures_collision","std_msds_val", "std_msds_diffusion","std_times"]) n_atoms = data["n_atoms"][0] density = data["densities"][0] equilibrated_collisions = data["collisions"] - 2*data["collisions"][0] \ + data["collisions"][1] """ ### 5 graphs: D(CPS) ### tight_layout = True skip_points = 0 ax = plt.subplot(3, 2, file_number+1) plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:], data[plotted_parameter][skip_points:] - data["std_" + plotted_parameter][skip_points:], data[plotted_parameter][skip_points:] + data["std_" + plotted_parameter][skip_points:], alpha=0.3) plt.plot((equilibrated_collisions / n_atoms)[skip_points:], data[plotted_parameter][skip_points:], lw=2) if plotted_parameter == "msds_diffusion": plt.ylim(0.990*data[plotted_parameter][-1], 1.005*data[plotted_parameter][-1]) plt.xlim([0, 1e5]) plt.legend(["Density {}".format(data["densities"][0])], loc="lower right") ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.4f')) plt.xlabel("Collisions per sphere") plt.ylabel("D") """ ### 5 graphs: relative D(CPS) ### tight_layout = True skip_points = 0 ax = plt.subplot(3, 2, file_number+1) plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:], -1 + (data[plotted_parameter][skip_points:] - data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1], -1 + (data[plotted_parameter][skip_points:] + data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1], alpha=0.3) plt.plot((equilibrated_collisions / n_atoms)[skip_points:], -1 + data[plotted_parameter][skip_points:]/data[plotted_parameter][-1], lw=2) plt.ylim(data["std_" + plotted_parameter][-1]*20*np.array([-1, 1])/data[plotted_parameter][-1]) #plt.xscale("log") plt.xlim([0, 1e5]) plt.legend(["$\\rho\\sigma^3=\\ {}$".format(data["densities"][0])], loc="lower right") ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.2e')) plt.xlabel("$C/N$") plt.ylabel("$[Z_{MD}(C) / Z_{MD}(C=10^5 N)] - 1$") """ ### 1 graph: D(t) ### show_legend = True skip_points = 0 plt.title("D(t) for 5 densities") plt.loglog(data["times"][skip_points:], data[plotted_parameter][skip_points:]) legend_names.append(data["densities"][0]) plt.xlabel("Time") plt.ylabel("D") """ """ ### 1 graph: D(t) / Dinf ### show_legend = True skip_points = 0 #plt.fill_between(data["times"][skip_points:], # (data[plotted_parameter] - data["std_" + plotted_parameter]) # / data[plotted_parameter][-1] - 1, # (data[plotted_parameter] + data["std_" + plotted_parameter]) # / data[plotted_parameter][-1] - 1, color="grey", alpha=0.4) plt.plot(data["times"][skip_points:], data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=1) legend_names.append(data["densities"][0]) 
#plt.xscale("log") plt.xlabel("Time") plt.ylabel("D / D(t --> inf)") """ """ ### 5 graphs: D(1/CPS) ### tight_layout = True skip_points = 40 ax = plt.subplot(3, 2, file_number+1) plt.fill_between((n_atoms / equilibrated_collisions)[skip_points:], data[plotted_parameter][skip_points:] - data["std_" + plotted_parameter][skip_points:], data[plotted_parameter][skip_points:] + data["std_" + plotted_parameter][skip_points:], alpha=0.3) plt.plot((n_atoms / equilibrated_collisions)[skip_points:], data[plotted_parameter][skip_points:], lw=2) plt.title("Density {}:".format(data["densities"][0])) ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.7f')) plt.xlim(xmin=0) plt.xlabel("1 / Collisions per sphere") plt.ylabel("D") """ """ ### 1 graph: D(CPS) / Dinf ### show_legend = True plt.fill_between(equilibrated_collisions / n_atoms, (data[plotted_parameter] - data["std_" + plotted_parameter]) / data[plotted_parameter][-1] - 1, (data[plotted_parameter] + data["std_" + plotted_parameter]) / data[plotted_parameter][-1] - 1, color="grey", alpha=0.4) plt.plot(equilibrated_collisions / n_atoms, data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=2) legend_names.append(data["densities"][0]) plt.xlabel("Collisions per sphere") plt.ylabel("D / D(t --> inf)") """ """ ### 1 graph: D(1/CPS) / Dinf ### show_legend = True plt.fill_between(n_atoms / equilibrated_collisions, (data[plotted_parameter] - data["std_" + plotted_parameter]) / data[plotted_parameter][-1] - 1, (data[plotted_parameter] + data["std_" + plotted_parameter]) / data[plotted_parameter][-1] - 1, color="grey", alpha=0.4) plt.plot( n_atoms / equilibrated_collisions, data[plotted_parameter] / data[plotted_parameter][-1] - 1) legend_names.append(data["densities"][0]) plt.xlabel(" 1 / Collisions per sphere") plt.ylabel(plotted_parameter) """ #if tight_layout: # plt.tight_layout(pad=0.0, w_pad=0.0, h_pad=0.0) if show_legend: plt.legend(legend_names, title="Density:", loc="lower right") plt.show()
gpl-3.0
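The plotting script above reads headerless, tab-separated files with fourteen columns; a small sketch that writes one such dummy file (all values are placeholders) so the script can be smoke-tested:

import numpy as np

cols = ["packings", "densities", "collisions", "n_atoms", "pressures_virial",
        "pressures_collision", "msds_val", "msds_diffusion", "times",
        "std_pressures_virial", "std_pressures_collision", "std_msds_val",
        "std_msds_diffusion", "std_times"]
dummy = np.linspace(1.0, 2.0, 10)[:, None].repeat(len(cols), axis=1)
# File name follows the glob pattern the script expects; the csv/ directory must exist.
np.savetxt("csv/convergence-256000-0.50.csv", dummy, delimiter="\t")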
piffey/ansible
lib/ansible/modules/storage/netapp/na_cdot_user.py
23
10225
#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' module: na_cdot_user short_description: useradmin configuration and management extends_documentation_fragment: - netapp.ontap version_added: '2.3' author: Sumit Kumar ([email protected]) description: - Create or destroy users. options: state: description: - Whether the specified user should exist or not. required: true choices: ['present', 'absent'] name: description: - The name of the user to manage. required: true application: description: - Applications to grant access to. required: true choices: ['console', 'http','ontapi','rsh','snmp','sp','ssh','telnet'] authentication_method: description: - Authentication method for the application. - Not all authentication methods are valid for an application. - Valid authentication methods for each application are as denoted in I(authentication_choices_description). - password for console application - password, domain, nsswitch, cert for http application. - password, domain, nsswitch, cert for ontapi application. - community for snmp application (when creating SNMPv1 and SNMPv2 users). - usm and community for snmp application (when creating SNMPv3 users). - password for sp application. - password for rsh application. - password for telnet application. - password, publickey, domain, nsswitch for ssh application. required: true choices: ['community', 'password', 'publickey', 'domain', 'nsswitch', 'usm'] set_password: description: - Password for the user account. - It is ignored for creating snmp users, but is required for creating non-snmp users. - For an existing user, this value will be used as the new password. role_name: description: - The name of the role. Required when C(state=present) vserver: description: - The name of the vserver to use. required: true ''' EXAMPLES = """ - name: Create User na_cdot_user: state: present name: SampleUser application: ssh authentication_method: password set_password: apn1242183u1298u41 role_name: vsadmin vserver: ansibleVServer hostname: "{{ netapp_hostname }}" username: "{{ netapp_username }}" password: "{{ netapp_password }}" """ RETURN = """ """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_NETAPP_LIB = netapp_utils.has_netapp_lib() class NetAppCDOTUser(object): """ Common operations to manage users and roles. 
""" def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), application=dict(required=True, type='str', choices=[ 'console', 'http', 'ontapi', 'rsh', 'snmp', 'sp', 'ssh', 'telnet']), authentication_method=dict(required=True, type='str', choices=['community', 'password', 'publickey', 'domain', 'nsswitch', 'usm']), set_password=dict(required=False, type='str', default=None), role_name=dict(required=False, type='str'), vserver=dict(required=True, type='str'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, required_if=[ ('state', 'present', ['role_name']) ], supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] self.application = p['application'] self.authentication_method = p['authentication_method'] self.set_password = p['set_password'] self.role_name = p['role_name'] self.vserver = p['vserver'] if HAS_NETAPP_LIB is False: self.module.fail_json(msg="the python NetApp-Lib module is required") else: self.server = netapp_utils.setup_ontap_zapi(module=self.module) def get_user(self): """ Checks if the user exists. :return: True if user found False if user is not found :rtype: bool """ security_login_get_iter = netapp_utils.zapi.NaElement('security-login-get-iter') query_details = netapp_utils.zapi.NaElement.create_node_with_children( 'security-login-account-info', **{'vserver': self.vserver, 'user-name': self.name, 'application': self.application, 'authentication-method': self.authentication_method}) query = netapp_utils.zapi.NaElement('query') query.add_child_elem(query_details) security_login_get_iter.add_child_elem(query) try: result = self.server.invoke_successfully(security_login_get_iter, enable_tunneling=False) if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1: return True else: return False except netapp_utils.zapi.NaApiError as e: # Error 16034 denotes a user not being found. 
if to_native(e.code) == "16034": return False else: self.module.fail_json(msg='Error getting user %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def create_user(self): user_create = netapp_utils.zapi.NaElement.create_node_with_children( 'security-login-create', **{'vserver': self.vserver, 'user-name': self.name, 'application': self.application, 'authentication-method': self.authentication_method, 'role-name': self.role_name}) if self.set_password is not None: user_create.add_new_child('password', self.set_password) try: self.server.invoke_successfully(user_create, enable_tunneling=False) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error creating user %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def delete_user(self): user_delete = netapp_utils.zapi.NaElement.create_node_with_children( 'security-login-delete', **{'vserver': self.vserver, 'user-name': self.name, 'application': self.application, 'authentication-method': self.authentication_method}) try: self.server.invoke_successfully(user_delete, enable_tunneling=False) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error removing user %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def change_password(self): """ Changes the password :return: True if password updated False if password is not updated :rtype: bool """ self.server.set_vserver(self.vserver) modify_password = netapp_utils.zapi.NaElement.create_node_with_children( 'security-login-modify-password', **{ 'new-password': str(self.set_password), 'user-name': self.name}) try: self.server.invoke_successfully(modify_password, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: if to_native(e.code) == '13114': return False else: self.module.fail_json(msg='Error setting password for user %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) self.server.set_vserver(None) return True def apply(self): property_changed = False password_changed = False user_exists = self.get_user() if user_exists: if self.state == 'absent': property_changed = True elif self.state == 'present': if self.set_password is not None: password_changed = self.change_password() else: if self.state == 'present': # Check if anything needs to be updated property_changed = True if property_changed: if self.module.check_mode: pass else: if self.state == 'present': if not user_exists: self.create_user() # Add ability to update parameters. elif self.state == 'absent': self.delete_user() changed = property_changed or password_changed self.module.exit_json(changed=changed) def main(): v = NetAppCDOTUser() v.apply() if __name__ == '__main__': main()
gpl-3.0
amdouglas/OpenPNM
OpenPNM/Geometry/models/throat_misc.py
1
1124
r""" =============================================================================== throat_misc -- Miscillaneous and generic functions to apply to throats =============================================================================== """ import scipy as _sp def random(geometry, seed=None, num_range=[0, 1], **kwargs): r""" Assign random number to throats note: should this be called 'poisson'? """ range_size = num_range[1] - num_range[0] range_min = num_range[0] _sp.random.seed(seed=seed) value = _sp.random.rand(geometry.num_throats(),) value = value*range_size + range_min return value def neighbor(geometry, network, pore_prop='pore.seed', mode='min', **kwargs): r""" Adopt a value based on the neighboring pores """ throats = network.throats(geometry.name) P12 = network.find_connected_pores(throats) pvalues = network[pore_prop][P12] if mode == 'min': value = _sp.amin(pvalues, axis=1) if mode == 'max': value = _sp.amax(pvalues, axis=1) if mode == 'mean': value = _sp.mean(pvalues, axis=1) return value
mit
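A minimal call of the random() model above with a stand-in geometry object (only num_throats() is required); neighbor() needs a full OpenPNM network and is not exercised here.

from OpenPNM.Geometry.models.throat_misc import random as throat_random


class ToyGeometry(object):
    """Stand-in exposing only the attribute the model uses."""
    def num_throats(self):
        return 4


print(throat_random(ToyGeometry(), seed=0, num_range=[0.2, 0.8]))  # 4 seeds in [0.2, 0.8]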
Digmaster/TicTacToe
Agent.py
1
2030
from random import randint
from random import getrandbits
from copy import deepcopy


# Agent that will either be the human player or a secondary agent for the dual agent play
class DumbAgent:
    # initialize the board for the first player
    def __init__(self, board):
        self.board = board

    def __str__(self):
        return "Hi, I'm dumb agent. I play randomly as player {0}".format(self.player)

    # read in the next move for the human or secondary agent
    def getNextMove(self, player):
        board = deepcopy(self.board)
        if(player!='X' and player!='O'):
            raise ValueError('The only valid players are X and O')
        while(True):
            try:
                square = randint(1, 9)
                board.setSquare(square, player)
                return square
            except ValueError:
                """Do nothing"""


# Define the smart agent - uses the minimax algorithm
class SmartAgent:
    def __init__(self, board):
        self.board = board
        self.signal = False
        self.bestVal = None

    def __str__(self):
        return "Hi, I'm smart agent. I play whatever move will net me the most points, or avail my enemy of points. I'm {0}".format(self.player)

    # to get the next move, call the decideMove function
    def getNextMove(self, player):
        self.decideMove(deepcopy(self.board), player)
        return self.bestVal

    def decideMove(self, board, player):
        if(self.signal):
            return 0
        winner = board.testWin()  # test for a winning solution to the current state
        if(winner!='.'):
            if(winner=='X'):
                return 1.0
            elif(winner=='T'):
                return 0.0
            else:
                return -1.0

        values = []
        moves = {}
        for i in range(1, 10):
            if(self.signal):
                return 0
            if(board.getSquare(i)=='.'):
                nBoard = deepcopy(board)
                nBoard.setSquare(i, player)
                value = self.decideMove(nBoard, 'X' if player=='O' else 'O')
                values.append(value)
                moves[value] = i
                if(player=='X' and value==1):
                    break
                elif(player=='O' and value==-1):
                    break

        # calculate the highest probability / best move
        if(player=='X'):
            sum = max(values)
        else:
            sum = min(values)
        self.bestVal = moves[sum]
        return sum
apache-2.0
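The agents above assume a Board object with setSquare(), getSquare() and testWin(); that class is not part of Agent.py, so the sketch below supplies a hypothetical minimal implementation and plays the two agents against each other from a pre-seeded position (to keep the minimax search small).

from Agent import DumbAgent, SmartAgent  # assumes the module above is importable


class Board(object):
    """Hypothetical minimal board satisfying the interface the agents expect."""
    WINS = [(1, 2, 3), (4, 5, 6), (7, 8, 9), (1, 4, 7),
            (2, 5, 8), (3, 6, 9), (1, 5, 9), (3, 5, 7)]

    def __init__(self):
        self.cells = {i: '.' for i in range(1, 10)}

    def getSquare(self, square):
        return self.cells[square]

    def setSquare(self, square, player):
        if self.cells[square] != '.':
            raise ValueError("square {0} is already taken".format(square))
        self.cells[square] = player

    def testWin(self):
        for a, b, c in self.WINS:
            if self.cells[a] != '.' and self.cells[a] == self.cells[b] == self.cells[c]:
                return self.cells[a]   # 'X' or 'O' wins
        if all(v != '.' for v in self.cells.values()):
            return 'T'                 # tie
        return '.'                     # game still open


board = Board()
for square, mark in [(5, 'X'), (1, 'O')]:  # pre-seed to shrink the search tree
    board.setSquare(square, mark)

agents = {'X': SmartAgent(board), 'O': DumbAgent(board)}
player = 'X'
while board.testWin() == '.':
    board.setSquare(agents[player].getNextMove(player), player)
    player = 'O' if player == 'X' else 'X'
print("Result: " + board.testWin())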
boooka/GeoPowerOff
venv/lib/python2.7/site-packages/django/contrib/syndication/views.py
74
8760
from __future__ import unicode_literals from calendar import timegm from django.conf import settings from django.contrib.sites.shortcuts import get_current_site from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist from django.http import HttpResponse, Http404 from django.template import loader, TemplateDoesNotExist, RequestContext from django.utils import feedgenerator from django.utils.encoding import force_text, iri_to_uri, smart_text from django.utils.html import escape from django.utils.http import http_date from django.utils import six from django.utils.timezone import get_default_timezone, is_naive, make_aware def add_domain(domain, url, secure=False): protocol = 'https' if secure else 'http' if url.startswith('//'): # Support network-path reference (see #16753) - RSS requires a protocol url = '%s:%s' % (protocol, url) elif not (url.startswith('http://') or url.startswith('https://') or url.startswith('mailto:')): url = iri_to_uri('%s://%s%s' % (protocol, domain, url)) return url class FeedDoesNotExist(ObjectDoesNotExist): pass class Feed(object): feed_type = feedgenerator.DefaultFeed title_template = None description_template = None def __call__(self, request, *args, **kwargs): try: obj = self.get_object(request, *args, **kwargs) except ObjectDoesNotExist: raise Http404('Feed object does not exist.') feedgen = self.get_feed(obj, request) response = HttpResponse(content_type=feedgen.mime_type) if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'): # if item_pubdate or item_updateddate is defined for the feed, set # header so as ConditionalGetMiddleware is able to send 304 NOT MODIFIED response['Last-Modified'] = http_date( timegm(feedgen.latest_post_date().utctimetuple())) feedgen.write(response, 'utf-8') return response def item_title(self, item): # Titles should be double escaped by default (see #6533) return escape(force_text(item)) def item_description(self, item): return force_text(item) def item_link(self, item): try: return item.get_absolute_url() except AttributeError: raise ImproperlyConfigured('Give your %s class a get_absolute_url() method, or define an item_link() method in your Feed class.' % item.__class__.__name__) def __get_dynamic_attr(self, attname, obj, default=None): try: attr = getattr(self, attname) except AttributeError: return default if callable(attr): # Check co_argcount rather than try/excepting the function and # catching the TypeError, because something inside the function # may raise the TypeError. This technique is more accurate. try: code = six.get_function_code(attr) except AttributeError: code = six.get_function_code(attr.__call__) if code.co_argcount == 2: # one argument is 'self' return attr(obj) else: return attr() return attr def feed_extra_kwargs(self, obj): """ Returns an extra keyword arguments dictionary that is used when initializing the feed generator. """ return {} def item_extra_kwargs(self, item): """ Returns an extra keyword arguments dictionary that is used with the `add_item` call of the feed generator. """ return {} def get_object(self, request, *args, **kwargs): return None def get_context_data(self, **kwargs): """ Returns a dictionary to use as extra context if either ``self.description_template`` or ``self.item_template`` are used. Default implementation preserves the old behavior of using {'obj': item, 'site': current_site} as the context. 
""" return {'obj': kwargs.get('item'), 'site': kwargs.get('site')} def get_feed(self, obj, request): """ Returns a feedgenerator.DefaultFeed object, fully populated, for this feed. Raises FeedDoesNotExist for invalid parameters. """ current_site = get_current_site(request) link = self.__get_dynamic_attr('link', obj) link = add_domain(current_site.domain, link, request.is_secure()) feed = self.feed_type( title=self.__get_dynamic_attr('title', obj), subtitle=self.__get_dynamic_attr('subtitle', obj), link=link, description=self.__get_dynamic_attr('description', obj), language=settings.LANGUAGE_CODE, feed_url=add_domain( current_site.domain, self.__get_dynamic_attr('feed_url', obj) or request.path, request.is_secure(), ), author_name=self.__get_dynamic_attr('author_name', obj), author_link=self.__get_dynamic_attr('author_link', obj), author_email=self.__get_dynamic_attr('author_email', obj), categories=self.__get_dynamic_attr('categories', obj), feed_copyright=self.__get_dynamic_attr('feed_copyright', obj), feed_guid=self.__get_dynamic_attr('feed_guid', obj), ttl=self.__get_dynamic_attr('ttl', obj), **self.feed_extra_kwargs(obj) ) title_tmp = None if self.title_template is not None: try: title_tmp = loader.get_template(self.title_template) except TemplateDoesNotExist: pass description_tmp = None if self.description_template is not None: try: description_tmp = loader.get_template(self.description_template) except TemplateDoesNotExist: pass for item in self.__get_dynamic_attr('items', obj): context = self.get_context_data(item=item, site=current_site, obj=obj, request=request) if title_tmp is not None: title = title_tmp.render(RequestContext(request, context)) else: title = self.__get_dynamic_attr('item_title', item) if description_tmp is not None: description = description_tmp.render(RequestContext(request, context)) else: description = self.__get_dynamic_attr('item_description', item) link = add_domain( current_site.domain, self.__get_dynamic_attr('item_link', item), request.is_secure(), ) enc = None enc_url = self.__get_dynamic_attr('item_enclosure_url', item) if enc_url: enc = feedgenerator.Enclosure( url=smart_text(enc_url), length=smart_text(self.__get_dynamic_attr('item_enclosure_length', item)), mime_type=smart_text(self.__get_dynamic_attr('item_enclosure_mime_type', item)) ) author_name = self.__get_dynamic_attr('item_author_name', item) if author_name is not None: author_email = self.__get_dynamic_attr('item_author_email', item) author_link = self.__get_dynamic_attr('item_author_link', item) else: author_email = author_link = None tz = get_default_timezone() pubdate = self.__get_dynamic_attr('item_pubdate', item) if pubdate and is_naive(pubdate): pubdate = make_aware(pubdate, tz) updateddate = self.__get_dynamic_attr('item_updateddate', item) if updateddate and is_naive(updateddate): updateddate = make_aware(updateddate, tz) feed.add_item( title=title, link=link, description=description, unique_id=self.__get_dynamic_attr('item_guid', item, link), unique_id_is_permalink=self.__get_dynamic_attr( 'item_guid_is_permalink', item), enclosure=enc, pubdate=pubdate, updateddate=updateddate, author_name=author_name, author_email=author_email, author_link=author_link, categories=self.__get_dynamic_attr('item_categories', item), item_copyright=self.__get_dynamic_attr('item_copyright', item), **self.item_extra_kwargs(item) ) return feed
apache-2.0
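Typical use of the Feed base class above is to subclass it in a project and point a URL at the instance; the model and field names below are invented for illustration.

from django.contrib.syndication.views import Feed
from myapp.models import Article  # hypothetical model


class LatestArticlesFeed(Feed):
    title = "Latest articles"
    link = "/articles/"
    description = "The ten most recently published articles."

    def items(self):
        return Article.objects.order_by('-published')[:10]

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.summary

    # item_link() is inherited: it calls each article's get_absolute_url().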
mfit/PdfTableAnnotator
script/csv-compare.py
1
8051
""" Copyright 2014 Matthias Frey Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ """ CSV-compare ----------- Compare table data stored in CSV (comma seperated values) format. """ import re import csv import sys import os def _pr_list(l1, l2, replace_chars = '[\n ]'): """ Calculate precision and recall regarding elements of a list. When a 1:1 match cannot be achieved, the list pointers will be moved forward until a match occurs (first of list A, then of list B). The closest match will count, and matching will continue from those list positions onwards. The replace_chars parameter is used to remove characters from the strings before comparing. The default will remove newlines and spaces. """ def _fnext(l, item): item = re.sub(replace_chars, '', item).strip() for i, txt in enumerate(l): txt = re.sub(replace_chars, '', txt).strip() if txt == item: return i return -1 if len(l2)==0 or len(l1)==0: return 0, 0 i = 0 j = 0 match = 0 while len(l1)>i and len(l2)>j: t1 = re.sub(replace_chars, '', l1[i]).strip() t2 = re.sub(replace_chars, '', l2[j]).strip() if t1 == t2: match += 1 i += 1 j += 1 else: ii = _fnext(l1[i:], l2[j]) jj = _fnext(l2[j:], l1[i]) if ii>=0 and (ii<jj or jj<0): i+=ii elif jj>=0: j+=jj else: i+=1 j+=1 return float(match)/len(l2), float(match)/len(l1) def clean_table(tab): """ Remove trailing empty cells resulting from the way some spreadsheet application output csv for multi table documents. """ if len(tab) == 0: return [] n_empty=[] for row in tab: for n, val in enumerate(reversed(row)): if val!='': break n_empty.append(n) strip_cols = min(n_empty) cleaned = [] for row in tab: cleaned.append(row[0:len(row)-strip_cols]) return cleaned def compare_tables(tab1, tab2): """ Compare two tables (2dim lists). """ info = {'rows_a':len(tab1), 'rows_b':len(tab2), 'rows_match': 1 if len(tab1) == len(tab2) else 0, } sizesA = [len(l) for l in tab1] sizesB = [len(l) for l in tab2] info['dim_match'] = 1 if sizesA == sizesB else 0 info['size_a'] = sum(sizesA) info['size_b'] = sum(sizesA) if len(sizesA)>0 and len(sizesB)>0: info['cols_match'] = 1 if min(sizesA) == max(sizesA) and \ min(sizesB) == max(sizesB) and min(sizesA) == min(sizesB) else 0 # 'flatten' tables cellsA = [] cellsB = [] for r in tab1: cellsA += [c for c in r] for r in tab2: cellsB += [c for c in r] info['p'], info['r'] = _pr_list(cellsA, cellsB) info['F1'] = F1(info['p'], info['r']) return info def compare_files_pr(file1, file2): """ Calculate simple P/R . Compare lists of cells, left to right , top to bottom. """ cells = [[], []] for i, fname in enumerate([file1, file2]): with file(fname) as csvfile: rd = csv.reader(csvfile, delimiter=',', quotechar='"') for r in rd: cells[i] += [c for c in r] return _pr_list(*cells) def compare_files(file1, file2): """ Compare two csv files. 
""" groundtruth = read_tables_from_file(file1) try: compare = read_tables_from_file(file2) except: compare = [] tbs = [groundtruth, compare] finfo = {'tabcount_a': len(tbs[0]), 'tabcount_b': len(tbs[1]), 'tabcount_match': len(tbs[0]) == len(tbs[1]), } finfo['tables']=[] for n in range(0, len(tbs[0])): if finfo['tabcount_match']: comp_info = compare_tables(tbs[0][n], tbs[1][n]) else: if n < len(tbs[1]): comp_info = compare_tables(tbs[0][n], tbs[1][n]) else: comp_info = compare_tables(tbs[0][n], [[]]) comp_info['n']=n finfo['tables'].append(comp_info) return finfo def output_compareinfo_csv(file, info, fields=['p', 'r', 'F1']): """ Pre-format a row that holds measures about similarity of a table to the ground truth. """ lines = [] tabmatch = 1 if info['tabcount_match'] else 0 for tinfo in info['tables']: lines.append([file, str(tabmatch)] + [str(tinfo[k]) for k in fields]) return lines def F1(p, r): """ Calculate F1 score from precision and recall. Returns zero if one of p, r is zero. """ return (2*p*r/(p+r)) if p != 0 and r != 0 else 0 def read_tables_from_file(csvfile): """ Opens csvfile, returns all tables found. Guesses csv format (delimiter, etc.) Splits data into different tables at newline (or empty row). Returns list of tables. """ tables=[] table_id = 0 with file(csvfile) as f: sniffer = csv.Sniffer() dialect = sniffer.sniff(f.next()) rd = csv.reader(f, delimiter=dialect.delimiter, quotechar=dialect.quotechar) for r in rd: if len(tables) <= table_id: tables.append([]) # Begin next table if there is an empty line if r == [] or sum([len(v) for v in r]) == 0: if len(tables[table_id])>0: table_id+=1 else: tables[table_id].append(r) return [clean_table(t) for t in tables if t!=[]] if __name__ == '__main__': """ Script usage. """ fields = [ #'rows_a', 'rows_b', #'size_a', 'size_b', 'n', 'rows_match', 'cols_match', 'dim_match', 'p', 'r', 'F1',] limitchar = ' & ' if len(sys.argv) < 3: print "Specify two (csv-)files or directories" quit(-1) # Params 1 + 2 are files or directories file1 = sys.argv[1] file2 = sys.argv[2] srcinfo = [os.path.basename(file1), os.path.basename(file2)] # 3rd parameter becomes 'tooldef' (text cols to name rows), # and 4th parameter tells whether to print headers tooldef = sys.argv[3].split('-') if len(sys.argv) > 3 else ['na', 'na'] print_headers = len(sys.argv) > 4 and sys.argv[4] in ["1", "y", "yes"] if print_headers: print ','.join(['name', 'tool', 'src1', 'src2', 'filename', 'tabsmatch',] + fields) if os.path.isfile(file1) and os.path.isfile(file2): inf = compare_files(file1, file2) lines = output_compareinfo_csv(file1, inf, fields) for l in lines: print ','.join(tooldef + srcinfo + l) elif os.path.isdir(file1) and os.path.isdir(file2): for f in [path for path in os.listdir(file1) if path[-4:]=='.csv']: if os.path.isfile(file2 + '/' + f): inf = compare_files(file1 + '/' + f, file2 + '/' + f) lines = output_compareinfo_csv(f, inf, fields) for l in lines: print ','.join(tooldef + srcinfo + l) else: print ','.join(['','',] + srcinfo + ['', "Missing {} for {} {}".format(f, *tooldef)])
apache-2.0
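A quick in-memory call of compare_tables() from the script above, assuming it is importable (for example saved next to the caller as csv_compare.py); the toy tables are invented.

from csv_compare import compare_tables  # hypothetical module name for the script above

ground_truth = [["name", "qty"], ["apples", "3"], ["pears", "5"]]
extracted = [["name", "qty"], ["apples", "3"], ["pears", "6"]]

info = compare_tables(ground_truth, extracted)
print("P=%(p).3f R=%(r).3f F1=%(F1).3f" % info)  # one cell differs, so P = R = 5/6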
michael-dev2rights/ansible
lib/ansible/modules/database/mssql/mssql_db.py
29
7066
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Vedit Firat Arig <[email protected]> # Outline and parts are reused from Mark Theunissen's mysql_db module # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: mssql_db short_description: Add or remove MSSQL databases from a remote host. description: - Add or remove MSSQL databases from a remote host. version_added: "2.2" options: name: description: - name of the database to add or remove required: true default: null aliases: [ db ] login_user: description: - The username used to authenticate with required: false default: null login_password: description: - The password used to authenticate with required: false default: null login_host: description: - Host running the database required: false login_port: description: - Port of the MSSQL server. Requires login_host be defined as other then localhost if login_port is used required: false default: 1433 state: description: - The database state required: false default: present choices: [ "present", "absent", "import" ] target: description: - Location, on the remote host, of the dump file to read from or write to. Uncompressed SQL files (C(.sql)) files are supported. required: false autocommit: description: - Automatically commit the change only if the import succeed. Sometimes it is necessary to use autocommit=true, since some content can't be changed within a transaction. required: false default: false choices: [ "false", "true" ] notes: - Requires the pymssql Python package on the remote host. For Ubuntu, this is as easy as pip install pymssql (See M(pip).) 
requirements: - python >= 2.7 - pymssql author: Vedit Firat Arig ''' EXAMPLES = ''' # Create a new database with name 'jackdata' - mssql_db: name: jackdata state: present # Copy database dump file to remote host and restore it to database 'my_db' - copy: src: dump.sql dest: /tmp - mssql_db: name: my_db state: import target: /tmp/dump.sql ''' RETURN = ''' # ''' import os try: import pymssql except ImportError: mssql_found = False else: mssql_found = True from ansible.module_utils.basic import AnsibleModule def db_exists(conn, cursor, db): cursor.execute("SELECT name FROM master.sys.databases WHERE name = %s", db) conn.commit() return bool(cursor.rowcount) def db_create(conn, cursor, db): cursor.execute("CREATE DATABASE [%s]" % db) return db_exists(conn, cursor, db) def db_delete(conn, cursor, db): try: cursor.execute("ALTER DATABASE [%s] SET single_user WITH ROLLBACK IMMEDIATE" % db) except: pass cursor.execute("DROP DATABASE [%s]" % db) return not db_exists(conn, cursor, db) def db_import(conn, cursor, module, db, target): if os.path.isfile(target): backup = open(target, 'r') try: sqlQuery = "USE [%s]\n" % db for line in backup: if line is None: break elif line.startswith('GO'): cursor.execute(sqlQuery) sqlQuery = "USE [%s]\n" % db else: sqlQuery += line cursor.execute(sqlQuery) conn.commit() finally: backup.close() return 0, "import successful", "" else: return 1, "cannot find target file", "cannot find target file" def main(): module = AnsibleModule( argument_spec=dict( name=dict(required=True, aliases=['db']), login_user=dict(default=''), login_password=dict(default='', no_log=True), login_host=dict(required=True), login_port=dict(default='1433'), target=dict(default=None), autocommit=dict(type='bool', default=False), state=dict( default='present', choices=['present', 'absent', 'import']) ) ) if not mssql_found: module.fail_json(msg="pymssql python module is required") db = module.params['name'] state = module.params['state'] autocommit = module.params['autocommit'] target = module.params["target"] login_user = module.params['login_user'] login_password = module.params['login_password'] login_host = module.params['login_host'] login_port = module.params['login_port'] login_querystring = login_host if login_port != "1433": login_querystring = "%s:%s" % (login_host, login_port) if login_user != "" and login_password == "": module.fail_json(msg="when supplying login_user arguments login_password must be provided") try: conn = pymssql.connect(user=login_user, password=login_password, host=login_querystring, database='master') cursor = conn.cursor() except Exception as e: if "Unknown database" in str(e): errno, errstr = e.args module.fail_json(msg="ERROR: %s %s" % (errno, errstr)) else: module.fail_json(msg="unable to connect, check login_user and login_password are correct, or alternatively check your " "@sysconfdir@/freetds.conf / ${HOME}/.freetds.conf") conn.autocommit(True) changed = False if db_exists(conn, cursor, db): if state == "absent": try: changed = db_delete(conn, cursor, db) except Exception as e: module.fail_json(msg="error deleting database: " + str(e)) elif state == "import": conn.autocommit(autocommit) rc, stdout, stderr = db_import(conn, cursor, module, db, target) if rc != 0: module.fail_json(msg="%s" % stderr) else: module.exit_json(changed=True, db=db, msg=stdout) else: if state == "present": try: changed = db_create(conn, cursor, db) except Exception as e: module.fail_json(msg="error creating database: " + str(e)) elif state == "import": try: changed = db_create(conn, 
cursor, db) except Exception as e: module.fail_json(msg="error creating database: " + str(e)) conn.autocommit(autocommit) rc, stdout, stderr = db_import(conn, cursor, module, db, target) if rc != 0: module.fail_json(msg="%s" % stderr) else: module.exit_json(changed=True, db=db, msg=stdout) module.exit_json(changed=changed, db=db) if __name__ == '__main__': main()
gpl-3.0
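For clarity, a standalone restatement of the batching rule db_import() applies when replaying a dump: lines accumulate until one starting with 'GO', which ends the current batch. Pure Python, no pymssql required, and the sample script text is invented.

def split_batches(script_text, db):
    """Mirror of db_import()'s loop: each batch is prefixed with USE [db]."""
    batches, current = [], "USE [%s]\n" % db
    for line in script_text.splitlines(True):
        if line.startswith('GO'):
            batches.append(current)
            current = "USE [%s]\n" % db
        else:
            current += line
    batches.append(current)  # db_import() also executes the final partial batch
    return batches


sample = "CREATE TABLE t (id INT)\nGO\nINSERT INTO t VALUES (1)\n"
for batch in split_batches(sample, "my_db"):
    print(batch)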
jshiv/turntable
test/lib/python2.7/site-packages/scipy/lib/lapack/tests/test_gesv.py
13
3510
from __future__ import division, print_function, absolute_import import numpy as np from numpy.testing import TestCase, assert_array_almost_equal, dec, \ assert_equal, assert_, run_module_suite from common import FUNCS_TP, FLAPACK_IS_EMPTY, CLAPACK_IS_EMPTY, FUNCS_FLAPACK, \ FUNCS_CLAPACK, PREC A = np.array([[1,2,3],[2,2,3],[3,3,6]]) B = np.array([[10,-1,1],[-1,8,-2],[1,-2,6]]) class TestSygv(TestCase): def _test_base(self, func, lang, itype): tp = FUNCS_TP[func] a = A.astype(tp) b = B.astype(tp) if lang == 'C': f = FUNCS_CLAPACK[func] elif lang == 'F': f = FUNCS_FLAPACK[func] else: raise ValueError("Lang %s ??" % lang) w, v, info = f(a, b, itype=itype) assert_(not info, msg=repr(info)) for i in range(3): if itype == 1: assert_array_almost_equal(np.dot(a,v[:,i]), w[i]*np.dot(b,v[:,i]), decimal=PREC[tp]) elif itype == 2: assert_array_almost_equal(np.dot(a,np.dot(b,v[:,i])), w[i]*v[:,i], decimal=PREC[tp]) elif itype == 3: assert_array_almost_equal(np.dot(b,np.dot(a,v[:,i])), w[i]*v[:,i], decimal=PREC[tp] - 1) else: raise ValueError(itype) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_ssygv_1(self): self._test_base('ssygv', 'F', 1) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_ssygv_2(self): self._test_base('ssygv', 'F', 2) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_ssygv_3(self): self._test_base('ssygv', 'F', 3) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_dsygv_1(self): self._test_base('dsygv', 'F', 1) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_dsygv_2(self): self._test_base('dsygv', 'F', 2) @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test") def test_dsygv_3(self): self._test_base('dsygv', 'F', 3) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"], "Clapack empty, skip flapack test") def test_clapack_ssygv_1(self): self._test_base('ssygv', 'C', 1) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"], "Clapack empty, skip flapack test") def test_clapack_ssygv_2(self): self._test_base('ssygv', 'C', 2) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"], "Clapack empty, skip flapack test") def test_clapack_ssygv_3(self): self._test_base('ssygv', 'C', 3) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"], "Clapack empty, skip flapack test") def test_clapack_dsygv_1(self): self._test_base('dsygv', 'C', 1) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"], "Clapack empty, skip flapack test") def test_clapack_dsygv_2(self): self._test_base('dsygv', 'C', 2) @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"], "Clapack empty, skip flapack test") def test_clapack_dsygv_3(self): self._test_base('dsygv', 'C', 3) if __name__ == "__main__": run_module_suite()
mit
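The assertions above check the symmetric generalized eigenproblem A v = w B v (itype=1) and its variants; with current SciPy the itype=1 check can be written against scipy.linalg.eigh, as in this sketch.

import numpy as np
from scipy.linalg import eigh

A = np.array([[1, 2, 3], [2, 2, 3], [3, 3, 6]], dtype=float)
B = np.array([[10, -1, 1], [-1, 8, -2], [1, -2, 6]], dtype=float)

w, v = eigh(A, B)  # generalized problem A v = w B v, B symmetric positive definite
for i in range(3):
    np.testing.assert_allclose(A.dot(v[:, i]), w[i] * B.dot(v[:, i]), atol=1e-8)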
Garrett-R/scikit-learn
sklearn/datasets/samples_generator.py
14
54612
""" Generate samples of synthetic data sets. """ # Authors: B. Thirion, G. Varoquaux, A. Gramfort, V. Michel, O. Grisel, # G. Louppe, J. Nothman # License: BSD 3 clause import numbers import warnings import array import numpy as np from scipy import linalg import scipy.sparse as sp from ..preprocessing import MultiLabelBinarizer from ..utils import check_array, check_random_state from ..utils import shuffle as util_shuffle from ..utils.fixes import astype from ..utils.random import sample_without_replacement from ..externals import six map = six.moves.map zip = six.moves.zip def _generate_hypercube(samples, dimensions, rng): """Returns distinct binary samples of length dimensions """ if dimensions > 30: return np.hstack([_generate_hypercube(samples, dimensions - 30, rng), _generate_hypercube(samples, 30, rng)]) out = astype(sample_without_replacement(2 ** dimensions, samples, random_state=rng), dtype='>u4', copy=False) out = np.unpackbits(out.view('>u1')).reshape((-1, 32))[:, -dimensions:] return out def make_classification(n_samples=100, n_features=20, n_informative=2, n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2, weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, scale=1.0, shuffle=True, random_state=None): """Generate a random n-class classification problem. This initially creates clusters of points normally distributed (std=1) about vertices of a `2 * class_sep`-sided hypercube, and assigns an equal number of clusters to each class. It introduces interdependence between these features and adds various types of further noise to the data. Prior to shuffling, `X` stacks a number of these primary "informative" features, "redundant" linear combinations of these, "repeated" duplicates of sampled features, and arbitrary noise for and remaining features. Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=20) The total number of features. These comprise `n_informative` informative features, `n_redundant` redundant features, `n_repeated` duplicated features and `n_features-n_informative-n_redundant- n_repeated` useless features drawn at random. n_informative : int, optional (default=2) The number of informative features. Each class is composed of a number of gaussian clusters each located around the vertices of a hypercube in a subspace of dimension `n_informative`. For each cluster, informative features are drawn independently from N(0, 1) and then randomly linearly combined within each cluster in order to add covariance. The clusters are then placed on the vertices of the hypercube. n_redundant : int, optional (default=2) The number of redundant features. These features are generated as random linear combinations of the informative features. n_repeated : int, optional (default=0) The number of duplicated features, drawn randomly from the informative and the redundant features. n_classes : int, optional (default=2) The number of classes (or labels) of the classification problem. n_clusters_per_class : int, optional (default=2) The number of clusters per class. weights : list of floats or None (default=None) The proportions of samples assigned to each class. If None, then classes are balanced. Note that if `len(weights) == n_classes - 1`, then the last class weight is automatically inferred. More than `n_samples` samples may be returned if the sum of `weights` exceeds 1. flip_y : float, optional (default=0.01) The fraction of samples whose class are randomly exchanged. 
class_sep : float, optional (default=1.0) The factor multiplying the hypercube dimension. hypercube : boolean, optional (default=True) If True, the clusters are put on the vertices of a hypercube. If False, the clusters are put on the vertices of a random polytope. shift : float, array of shape [n_features] or None, optional (default=0.0) Shift features by the specified value. If None, then features are shifted by a random value drawn in [-class_sep, class_sep]. scale : float, array of shape [n_features] or None, optional (default=1.0) Multiply features by the specified value. If None, then features are scaled by a random value drawn in [1, 100]. Note that scaling happens after shifting. shuffle : boolean, optional (default=True) Shuffle the samples and the features. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The generated samples. y : array of shape [n_samples] The integer labels for class membership of each sample. Notes ----- The algorithm is adapted from Guyon [1] and was designed to generate the "Madelon" dataset. References ---------- .. [1] I. Guyon, "Design of experiments for the NIPS 2003 variable selection benchmark", 2003. See also -------- make_blobs: simplified variant make_multilabel_classification: unrelated generator for multilabel tasks """ generator = check_random_state(random_state) # Count features, clusters and samples if n_informative + n_redundant + n_repeated > n_features: raise ValueError("Number of informative, redundant and repeated " "features must sum to less than the number of total" " features") if 2 ** n_informative < n_classes * n_clusters_per_class: raise ValueError("n_classes * n_clusters_per_class must" " be smaller or equal 2 ** n_informative") if weights and len(weights) not in [n_classes, n_classes - 1]: raise ValueError("Weights specified but incompatible with number " "of classes.") n_useless = n_features - n_informative - n_redundant - n_repeated n_clusters = n_classes * n_clusters_per_class if weights and len(weights) == (n_classes - 1): weights.append(1.0 - sum(weights)) if weights is None: weights = [1.0 / n_classes] * n_classes weights[-1] = 1.0 - sum(weights[:-1]) # Distribute samples among clusters by weight n_samples_per_cluster = [] for k in range(n_clusters): n_samples_per_cluster.append(int(n_samples * weights[k % n_classes] / n_clusters_per_class)) for i in range(n_samples - sum(n_samples_per_cluster)): n_samples_per_cluster[i % n_clusters] += 1 # Intialize X and y X = np.zeros((n_samples, n_features)) y = np.zeros(n_samples, dtype=np.int) # Build the polytope whose vertices become cluster centroids centroids = _generate_hypercube(n_clusters, n_informative, generator).astype(float) centroids *= 2 * class_sep centroids -= class_sep if not hypercube: centroids *= generator.rand(n_clusters, 1) centroids *= generator.rand(1, n_informative) # Initially draw informative features from the standard normal X[:, :n_informative] = generator.randn(n_samples, n_informative) # Create each cluster; a variant of make_blobs stop = 0 for k, centroid in enumerate(centroids): start, stop = stop, stop + n_samples_per_cluster[k] y[start:stop] = k % n_classes # assign labels X_k = X[start:stop, :n_informative] # slice a view of the cluster A = 2 * 
generator.rand(n_informative, n_informative) - 1 X_k[...] = np.dot(X_k, A) # introduce random covariance X_k += centroid # shift the cluster to a vertex # Create redundant features if n_redundant > 0: B = 2 * generator.rand(n_informative, n_redundant) - 1 X[:, n_informative:n_informative + n_redundant] = \ np.dot(X[:, :n_informative], B) # Repeat some features if n_repeated > 0: n = n_informative + n_redundant indices = ((n - 1) * generator.rand(n_repeated) + 0.5).astype(np.intp) X[:, n:n + n_repeated] = X[:, indices] # Fill useless features if n_useless > 0: X[:, -n_useless:] = generator.randn(n_samples, n_useless) # Randomly replace labels if flip_y >= 0.0: flip_mask = generator.rand(n_samples) < flip_y y[flip_mask] = generator.randint(n_classes, size=flip_mask.sum()) # Randomly shift and scale if shift is None: shift = (2 * generator.rand(n_features) - 1) * class_sep X += shift if scale is None: scale = 1 + 100 * generator.rand(n_features) X *= scale if shuffle: # Randomly permute samples X, y = util_shuffle(X, y, random_state=generator) # Randomly permute features indices = np.arange(n_features) generator.shuffle(indices) X[:, :] = X[:, indices] return X, y def make_multilabel_classification(n_samples=100, n_features=20, n_classes=5, n_labels=2, length=50, allow_unlabeled=True, sparse=False, return_indicator=False, return_distributions=False, random_state=None): """Generate a random multilabel classification problem. For each sample, the generative process is: - pick the number of labels: n ~ Poisson(n_labels) - n times, choose a class c: c ~ Multinomial(theta) - pick the document length: k ~ Poisson(length) - k times, choose a word: w ~ Multinomial(theta_c) In the above process, rejection sampling is used to make sure that n is never zero or more than `n_classes`, and that the document length is never zero. Likewise, we reject classes which have already been chosen. Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=20) The total number of features. n_classes : int, optional (default=5) The number of classes of the classification problem. n_labels : int, optional (default=2) The average number of labels per instance. More precisely, the number of labels per sample is drawn from a Poisson distribution with ``n_labels`` as its expected value, but samples are bounded (using rejection sampling) by ``n_classes``, and must be nonzero if ``allow_unlabeled`` is False. length : int, optional (default=50) The sum of the features (number of words if documents) is drawn from a Poisson distribution with this expected value. allow_unlabeled : bool, optional (default=True) If ``True``, some instances might not belong to any class. sparse : bool, optional (default=False) If ``True``, return a sparse feature matrix return_indicator : bool, optional (default=False), If ``True``, return ``Y`` in the binary indicator format, else return a tuple of lists of labels. return_distributions : bool, optional (default=False) If ``True``, return the prior class probability and conditional probabilities of features given classes, from which the data was drawn. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. 
Returns ------- X : array or sparse CSR matrix of shape [n_samples, n_features] The generated samples. Y : tuple of lists or array of shape [n_samples, n_classes] The label sets. p_c : array, shape [n_classes] The probability of each class being drawn. Only returned if ``return_distributions=True``. p_w_c : array, shape [n_features, n_classes] The probability of each feature being drawn given each class. Only returned if ``return_distributions=True``. """ generator = check_random_state(random_state) p_c = generator.rand(n_classes) p_c /= p_c.sum() cumulative_p_c = np.cumsum(p_c) p_w_c = generator.rand(n_features, n_classes) p_w_c /= np.sum(p_w_c, axis=0) def sample_example(): _, n_classes = p_w_c.shape # pick a nonzero number of labels per document by rejection sampling y_size = n_classes + 1 while (not allow_unlabeled and y_size == 0) or y_size > n_classes: y_size = generator.poisson(n_labels) # pick n classes y = set() while len(y) != y_size: # pick a class with probability P(c) c = np.searchsorted(cumulative_p_c, generator.rand(y_size - len(y))) y.update(c) y = list(y) # pick a non-zero document length by rejection sampling n_words = 0 while n_words == 0: n_words = generator.poisson(length) # generate a document of length n_words if len(y) == 0: # if sample does not belong to any class, generate noise word words = generator.randint(n_features, size=n_words) return words, y # sample words with replacement from selected classes cumulative_p_w_sample = p_w_c.take(y, axis=1).sum(axis=1).cumsum() cumulative_p_w_sample /= cumulative_p_w_sample[-1] words = np.searchsorted(cumulative_p_w_sample, generator.rand(n_words)) return words, y X_indices = array.array('i') X_indptr = array.array('i', [0]) Y = [] for i in range(n_samples): words, y = sample_example() X_indices.extend(words) X_indptr.append(len(X_indices)) Y.append(y) X_data = np.ones(len(X_indices), dtype=np.float64) X = sp.csr_matrix((X_data, X_indices, X_indptr), shape=(n_samples, n_features)) X.sum_duplicates() if not sparse: X = X.toarray() if return_indicator: lb = MultiLabelBinarizer() Y = lb.fit([range(n_classes)]).transform(Y) else: warnings.warn('Support for the sequence of sequences multilabel ' 'representation is being deprecated and replaced with ' 'a sparse indicator matrix. ' 'return_indicator will default to True from version ' '0.17.', DeprecationWarning) if return_distributions: return X, Y, p_c, p_w_c return X, Y def make_hastie_10_2(n_samples=12000, random_state=None): """Generates data for binary classification used in Hastie et al. 2009, Example 10.2. The ten features are standard independent Gaussian and the target ``y`` is defined by:: y[i] = 1 if np.sum(X[i] ** 2) > 9.34 else -1 Parameters ---------- n_samples : int, optional (default=12000) The number of samples. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 10] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical Learning Ed. 2", Springer, 2009. 
See also -------- make_gaussian_quantiles: a generalization of this dataset approach """ rs = check_random_state(random_state) shape = (n_samples, 10) X = rs.normal(size=shape).reshape(shape) y = ((X ** 2.0).sum(axis=1) > 9.34).astype(np.float64) y[y == 0.0] = -1.0 return X, y def make_regression(n_samples=100, n_features=100, n_informative=10, n_targets=1, bias=0.0, effective_rank=None, tail_strength=0.5, noise=0.0, shuffle=True, coef=False, random_state=None): """Generate a random regression problem. The input set can either be well conditioned (by default) or have a low rank-fat tail singular profile. See :func:`make_low_rank_matrix` for more details. The output is generated by applying a (potentially biased) random linear regression model with `n_informative` nonzero regressors to the previously generated input and some gaussian centered noise with some adjustable scale. Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=100) The number of features. n_informative : int, optional (default=10) The number of informative features, i.e., the number of features used to build the linear model used to generate the output. n_targets : int, optional (default=1) The number of regression targets, i.e., the dimension of the y output vector associated with a sample. By default, the output is a scalar. bias : float, optional (default=0.0) The bias term in the underlying linear model. effective_rank : int or None, optional (default=None) if not None: The approximate number of singular vectors required to explain most of the input data by linear combinations. Using this kind of singular spectrum in the input allows the generator to reproduce the correlations often observed in practice. if None: The input set is well conditioned, centered and gaussian with unit variance. tail_strength : float between 0.0 and 1.0, optional (default=0.5) The relative importance of the fat noisy tail of the singular values profile if `effective_rank` is not None. noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. shuffle : boolean, optional (default=True) Shuffle the samples and the features. coef : boolean, optional (default=False) If True, the coefficients of the underlying linear model are returned. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The input samples. y : array of shape [n_samples] or [n_samples, n_targets] The output values. coef : array of shape [n_features] or [n_features, n_targets], optional The coefficient of the underlying linear model. It is returned only if coef is True. 
""" n_informative = min(n_features, n_informative) generator = check_random_state(random_state) if effective_rank is None: # Randomly generate a well conditioned input set X = generator.randn(n_samples, n_features) else: # Randomly generate a low rank, fat tail input set X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features, effective_rank=effective_rank, tail_strength=tail_strength, random_state=generator) # Generate a ground truth model with only n_informative features being non # zeros (the other features are not correlated to y and should be ignored # by a sparsifying regularizers such as L1 or elastic net) ground_truth = np.zeros((n_features, n_targets)) ground_truth[:n_informative, :] = 100 * generator.rand(n_informative, n_targets) y = np.dot(X, ground_truth) + bias # Add noise if noise > 0.0: y += generator.normal(scale=noise, size=y.shape) # Randomly permute samples and features if shuffle: X, y = util_shuffle(X, y, random_state=generator) indices = np.arange(n_features) generator.shuffle(indices) X[:, :] = X[:, indices] ground_truth = ground_truth[indices] y = np.squeeze(y) if coef: return X, y, np.squeeze(ground_truth) else: return X, y def make_circles(n_samples=100, shuffle=True, noise=None, random_state=None, factor=.8): """Make a large circle containing a smaller circle in 2d. A simple toy dataset to visualize clustering and classification algorithms. Parameters ---------- n_samples : int, optional (default=100) The total number of points generated. shuffle: bool, optional (default=True) Whether to shuffle the samples. noise : double or None (default=None) Standard deviation of Gaussian noise added to the data. factor : double < 1 (default=.8) Scale factor between inner and outer circle. Returns ------- X : array of shape [n_samples, 2] The generated samples. y : array of shape [n_samples] The integer labels (0 or 1) for class membership of each sample. """ if factor > 1 or factor < 0: raise ValueError("'factor' has to be between 0 and 1.") generator = check_random_state(random_state) # so as not to have the first point = last point, we add one and then # remove it. linspace = np.linspace(0, 2 * np.pi, n_samples // 2 + 1)[:-1] outer_circ_x = np.cos(linspace) outer_circ_y = np.sin(linspace) inner_circ_x = outer_circ_x * factor inner_circ_y = outer_circ_y * factor X = np.vstack((np.append(outer_circ_x, inner_circ_x), np.append(outer_circ_y, inner_circ_y))).T y = np.hstack([np.zeros(n_samples // 2, dtype=np.intp), np.ones(n_samples // 2, dtype=np.intp)]) if shuffle: X, y = util_shuffle(X, y, random_state=generator) if not noise is None: X += generator.normal(scale=noise, size=X.shape) return X, y def make_moons(n_samples=100, shuffle=True, noise=None, random_state=None): """Make two interleaving half circles A simple toy dataset to visualize clustering and classification algorithms. Parameters ---------- n_samples : int, optional (default=100) The total number of points generated. shuffle : bool, optional (default=True) Whether to shuffle the samples. noise : double or None (default=None) Standard deviation of Gaussian noise added to the data. Returns ------- X : array of shape [n_samples, 2] The generated samples. y : array of shape [n_samples] The integer labels (0 or 1) for class membership of each sample. 
""" n_samples_out = n_samples // 2 n_samples_in = n_samples - n_samples_out generator = check_random_state(random_state) outer_circ_x = np.cos(np.linspace(0, np.pi, n_samples_out)) outer_circ_y = np.sin(np.linspace(0, np.pi, n_samples_out)) inner_circ_x = 1 - np.cos(np.linspace(0, np.pi, n_samples_in)) inner_circ_y = 1 - np.sin(np.linspace(0, np.pi, n_samples_in)) - .5 X = np.vstack((np.append(outer_circ_x, inner_circ_x), np.append(outer_circ_y, inner_circ_y))).T y = np.hstack([np.zeros(n_samples_in, dtype=np.intp), np.ones(n_samples_out, dtype=np.intp)]) if shuffle: X, y = util_shuffle(X, y, random_state=generator) if not noise is None: X += generator.normal(scale=noise, size=X.shape) return X, y def make_blobs(n_samples=100, n_features=2, centers=3, cluster_std=1.0, center_box=(-10.0, 10.0), shuffle=True, random_state=None): """Generate isotropic Gaussian blobs for clustering. Parameters ---------- n_samples : int, optional (default=100) The total number of points equally divided among clusters. n_features : int, optional (default=2) The number of features for each sample. centers : int or array of shape [n_centers, n_features], optional (default=3) The number of centers to generate, or the fixed center locations. cluster_std: float or sequence of floats, optional (default=1.0) The standard deviation of the clusters. center_box: pair of floats (min, max), optional (default=(-10.0, 10.0)) The bounding box for each cluster center when centers are generated at random. shuffle : boolean, optional (default=True) Shuffle the samples. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The generated samples. y : array of shape [n_samples] The integer labels for cluster membership of each sample. Examples -------- >>> from sklearn.datasets.samples_generator import make_blobs >>> X, y = make_blobs(n_samples=10, centers=3, n_features=2, ... random_state=0) >>> print(X.shape) (10, 2) >>> y array([0, 0, 1, 0, 2, 2, 2, 1, 1, 0]) See also -------- make_classification: a more intricate variant """ generator = check_random_state(random_state) if isinstance(centers, numbers.Integral): centers = generator.uniform(center_box[0], center_box[1], size=(centers, n_features)) else: centers = check_array(centers) n_features = centers.shape[1] X = [] y = [] n_centers = centers.shape[0] n_samples_per_center = [int(n_samples // n_centers)] * n_centers for i in range(n_samples % n_centers): n_samples_per_center[i] += 1 for i, n in enumerate(n_samples_per_center): X.append(centers[i] + generator.normal(scale=cluster_std, size=(n, n_features))) y += [i] * n X = np.concatenate(X) y = np.array(y) if shuffle: indices = np.arange(n_samples) generator.shuffle(indices) X = X[indices] y = y[indices] return X, y def make_friedman1(n_samples=100, n_features=10, noise=0.0, random_state=None): """Generate the "Friedman \#1" regression problem This dataset is described in Friedman [1] and Breiman [2]. Inputs `X` are independent features uniformly distributed on the interval [0, 1]. The output `y` is created according to the formula:: y(X) = 10 * sin(pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - 0.5) ** 2 \ + 10 * X[:, 3] + 5 * X[:, 4] + noise * N(0, 1). Out of the `n_features` features, only 5 are actually used to compute `y`. 
The remaining features are independent of `y`. The number of features has to be >= 5. Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=10) The number of features. Should be at least 5. noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals of Statistics 19 (1), pages 1-67, 1991. .. [2] L. Breiman, "Bagging predictors", Machine Learning 24, pages 123-140, 1996. """ if n_features < 5: raise ValueError("n_features must be at least five.") generator = check_random_state(random_state) X = generator.rand(n_samples, n_features) y = 10 * np.sin(np.pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - 0.5) ** 2 \ + 10 * X[:, 3] + 5 * X[:, 4] + noise * generator.randn(n_samples) return X, y def make_friedman2(n_samples=100, noise=0.0, random_state=None): """Generate the "Friedman \#2" regression problem This dataset is described in Friedman [1] and Breiman [2]. Inputs `X` are 4 independent features uniformly distributed on the intervals:: 0 <= X[:, 0] <= 100, 40 * pi <= X[:, 1] <= 560 * pi, 0 <= X[:, 2] <= 1, 1 <= X[:, 3] <= 11. The output `y` is created according to the formula:: y(X) = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] \ - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 + noise * N(0, 1). Parameters ---------- n_samples : int, optional (default=100) The number of samples. noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 4] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals of Statistics 19 (1), pages 1-67, 1991. .. [2] L. Breiman, "Bagging predictors", Machine Learning 24, pages 123-140, 1996. """ generator = check_random_state(random_state) X = generator.rand(n_samples, 4) X[:, 0] *= 100 X[:, 1] *= 520 * np.pi X[:, 1] += 40 * np.pi X[:, 3] *= 10 X[:, 3] += 1 y = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 \ + noise * generator.randn(n_samples) return X, y def make_friedman3(n_samples=100, noise=0.0, random_state=None): """Generate the "Friedman \#3" regression problem This dataset is described in Friedman [1] and Breiman [2]. Inputs `X` are 4 independent features uniformly distributed on the intervals:: 0 <= X[:, 0] <= 100, 40 * pi <= X[:, 1] <= 560 * pi, 0 <= X[:, 2] <= 1, 1 <= X[:, 3] <= 11. The output `y` is created according to the formula:: y(X) = arctan((X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) \ / X[:, 0]) + noise * N(0, 1). Parameters ---------- n_samples : int, optional (default=100) The number of samples. 
noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 4] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals of Statistics 19 (1), pages 1-67, 1991. .. [2] L. Breiman, "Bagging predictors", Machine Learning 24, pages 123-140, 1996. """ generator = check_random_state(random_state) X = generator.rand(n_samples, 4) X[:, 0] *= 100 X[:, 1] *= 520 * np.pi X[:, 1] += 40 * np.pi X[:, 3] *= 10 X[:, 3] += 1 y = np.arctan((X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) / X[:, 0]) \ + noise * generator.randn(n_samples) return X, y def make_low_rank_matrix(n_samples=100, n_features=100, effective_rank=10, tail_strength=0.5, random_state=None): """Generate a mostly low rank matrix with bell-shaped singular values Most of the variance can be explained by a bell-shaped curve of width effective_rank: the low rank part of the singular values profile is:: (1 - tail_strength) * exp(-1.0 * (i / effective_rank) ** 2) The remaining singular values' tail is fat, decreasing as:: tail_strength * exp(-0.1 * i / effective_rank). The low rank part of the profile can be considered the structured signal part of the data while the tail can be considered the noisy part of the data that cannot be summarized by a low number of linear components (singular vectors). This kind of singular profiles is often seen in practice, for instance: - gray level pictures of faces - TF-IDF vectors of text documents crawled from the web Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=100) The number of features. effective_rank : int, optional (default=10) The approximate number of singular vectors required to explain most of the data by linear combinations. tail_strength : float between 0.0 and 1.0, optional (default=0.5) The relative importance of the fat noisy tail of the singular values profile. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The matrix. """ generator = check_random_state(random_state) n = min(n_samples, n_features) # Random (ortho normal) vectors u, _ = linalg.qr(generator.randn(n_samples, n), mode='economic') v, _ = linalg.qr(generator.randn(n_features, n), mode='economic') # Index of the singular values singular_ind = np.arange(n, dtype=np.float64) # Build the singular profile by assembling signal and noise components low_rank = ((1 - tail_strength) * np.exp(-1.0 * (singular_ind / effective_rank) ** 2)) tail = tail_strength * np.exp(-0.1 * singular_ind / effective_rank) s = np.identity(n) * (low_rank + tail) return np.dot(np.dot(u, s), v.T) def make_sparse_coded_signal(n_samples, n_components, n_features, n_nonzero_coefs, random_state=None): """Generate a signal as a sparse combination of dictionary elements. 
Returns a matrix Y = DX, such as D is (n_features, n_components), X is (n_components, n_samples) and each column of X has exactly n_nonzero_coefs non-zero elements. Parameters ---------- n_samples : int number of samples to generate n_components: int, number of components in the dictionary n_features : int number of features of the dataset to generate n_nonzero_coefs : int number of active (non-zero) coefficients in each sample random_state: int or RandomState instance, optional (default=None) seed used by the pseudo random number generator Returns ------- data: array of shape [n_features, n_samples] The encoded signal (Y). dictionary: array of shape [n_features, n_components] The dictionary with normalized components (D). code: array of shape [n_components, n_samples] The sparse code such that each column of this matrix has exactly n_nonzero_coefs non-zero items (X). """ generator = check_random_state(random_state) # generate dictionary D = generator.randn(n_features, n_components) D /= np.sqrt(np.sum((D ** 2), axis=0)) # generate code X = np.zeros((n_components, n_samples)) for i in range(n_samples): idx = np.arange(n_components) generator.shuffle(idx) idx = idx[:n_nonzero_coefs] X[idx, i] = generator.randn(n_nonzero_coefs) # encode signal Y = np.dot(D, X) return map(np.squeeze, (Y, D, X)) def make_sparse_uncorrelated(n_samples=100, n_features=10, random_state=None): """Generate a random regression problem with sparse uncorrelated design This dataset is described in Celeux et al [1]. as:: X ~ N(0, 1) y(X) = X[:, 0] + 2 * X[:, 1] - 2 * X[:, 2] - 1.5 * X[:, 3] Only the first 4 features are informative. The remaining features are useless. Parameters ---------- n_samples : int, optional (default=100) The number of samples. n_features : int, optional (default=10) The number of features. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] G. Celeux, M. El Anbari, J.-M. Marin, C. P. Robert, "Regularization in regression: comparing Bayesian and frequentist methods in a poorly informative situation", 2009. """ generator = check_random_state(random_state) X = generator.normal(loc=0, scale=1, size=(n_samples, n_features)) y = generator.normal(loc=(X[:, 0] + 2 * X[:, 1] - 2 * X[:, 2] - 1.5 * X[:, 3]), scale=np.ones(n_samples)) return X, y def make_spd_matrix(n_dim, random_state=None): """Generate a random symmetric, positive-definite matrix. Parameters ---------- n_dim : int The matrix dimension. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_dim, n_dim] The random symmetric, positive-definite matrix. 
See also -------- make_sparse_spd_matrix """ generator = check_random_state(random_state) A = generator.rand(n_dim, n_dim) U, s, V = linalg.svd(np.dot(A.T, A)) X = np.dot(np.dot(U, 1.0 + np.diag(generator.rand(n_dim))), V) return X def make_sparse_spd_matrix(dim=1, alpha=0.95, norm_diag=False, smallest_coef=.1, largest_coef=.9, random_state=None): """Generate a sparse symmetric definite positive matrix. Parameters ---------- dim: integer, optional (default=1) The size of the random (matrix to generate. alpha: float between 0 and 1, optional (default=0.95) The probability that a coefficient is non zero (see notes). random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- prec: array of shape = [dim, dim] Notes ----- The sparsity is actually imposed on the cholesky factor of the matrix. Thus alpha does not translate directly into the filling fraction of the matrix itself. See also -------- make_spd_matrix """ random_state = check_random_state(random_state) chol = -np.eye(dim) aux = random_state.rand(dim, dim) aux[aux < alpha] = 0 aux[aux > alpha] = (smallest_coef + (largest_coef - smallest_coef) * random_state.rand(np.sum(aux > alpha))) aux = np.tril(aux, k=-1) # Permute the lines: we don't want to have asymmetries in the final # SPD matrix permutation = random_state.permutation(dim) aux = aux[permutation].T[permutation] chol += aux prec = np.dot(chol.T, chol) if norm_diag: d = np.diag(prec) d = 1. / np.sqrt(d) prec *= d prec *= d[:, np.newaxis] return prec def make_swiss_roll(n_samples=100, noise=0.0, random_state=None): """Generate a swiss roll dataset. Parameters ---------- n_samples : int, optional (default=100) The number of sample points on the S curve. noise : float, optional (default=0.0) The standard deviation of the gaussian noise. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 3] The points. t : array of shape [n_samples] The univariate position of the sample according to the main dimension of the points in the manifold. Notes ----- The algorithm is from Marsland [1]. References ---------- .. [1] S. Marsland, "Machine Learning: An Algorithmic Perpsective", Chapter 10, 2009. http://www-ist.massey.ac.nz/smarsland/Code/10/lle.py """ generator = check_random_state(random_state) t = 1.5 * np.pi * (1 + 2 * generator.rand(1, n_samples)) x = t * np.cos(t) y = 21 * generator.rand(1, n_samples) z = t * np.sin(t) X = np.concatenate((x, y, z)) X += noise * generator.randn(3, n_samples) X = X.T t = np.squeeze(t) return X, t def make_s_curve(n_samples=100, noise=0.0, random_state=None): """Generate an S curve dataset. Parameters ---------- n_samples : int, optional (default=100) The number of sample points on the S curve. noise : float, optional (default=0.0) The standard deviation of the gaussian noise. 
random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 3] The points. t : array of shape [n_samples] The univariate position of the sample according to the main dimension of the points in the manifold. """ generator = check_random_state(random_state) t = 3 * np.pi * (generator.rand(1, n_samples) - 0.5) x = np.sin(t) y = 2.0 * generator.rand(1, n_samples) z = np.sign(t) * (np.cos(t) - 1) X = np.concatenate((x, y, z)) X += noise * generator.randn(3, n_samples) X = X.T t = np.squeeze(t) return X, t def make_gaussian_quantiles(mean=None, cov=1., n_samples=100, n_features=2, n_classes=3, shuffle=True, random_state=None): """Generate isotropic Gaussian and label samples by quantile This classification dataset is constructed by taking a multi-dimensional standard normal distribution and defining classes separated by nested concentric multi-dimensional spheres such that roughly equal numbers of samples are in each class (quantiles of the :math:`\chi^2` distribution). Parameters ---------- mean : array of shape [n_features], optional (default=None) The mean of the multi-dimensional normal distribution. If None then use the origin (0, 0, ...). cov : float, optional (default=1.) The covariance matrix will be this value times the unit matrix. This dataset only produces symmetric normal distributions. n_samples : int, optional (default=100) The total number of points equally divided among classes. n_features : int, optional (default=2) The number of features for each sample. n_classes : int, optional (default=3) The number of classes shuffle : boolean, optional (default=True) Shuffle the samples. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, n_features] The generated samples. y : array of shape [n_samples] The integer labels for quantile membership of each sample. Notes ----- The dataset is from Zhu et al [1]. References ---------- .. [1] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009. 
""" if n_samples < n_classes: raise ValueError("n_samples must be at least n_classes") generator = check_random_state(random_state) if mean is None: mean = np.zeros(n_features) else: mean = np.array(mean) # Build multivariate normal distribution X = generator.multivariate_normal(mean, cov * np.identity(n_features), (n_samples,)) # Sort by distance from origin idx = np.argsort(np.sum((X - mean[np.newaxis, :]) ** 2, axis=1)) X = X[idx, :] # Label by quantile step = n_samples // n_classes y = np.hstack([np.repeat(np.arange(n_classes), step), np.repeat(n_classes - 1, n_samples - step * n_classes)]) if shuffle: X, y = util_shuffle(X, y, random_state=generator) return X, y def _shuffle(data, random_state=None): generator = check_random_state(random_state) n_rows, n_cols = data.shape row_idx = generator.permutation(n_rows) col_idx = generator.permutation(n_cols) result = data[row_idx][:, col_idx] return result, row_idx, col_idx def make_biclusters(shape, n_clusters, noise=0.0, minval=10, maxval=100, shuffle=True, random_state=None): """Generate an array with constant block diagonal structure for biclustering. Parameters ---------- shape : iterable (n_rows, n_cols) The shape of the result. n_clusters : integer The number of biclusters. noise : float, optional (default=0.0) The standard deviation of the gaussian noise. minval : int, optional (default=10) Minimum value of a bicluster. maxval : int, optional (default=100) Maximum value of a bicluster. shuffle : boolean, optional (default=True) Shuffle the samples. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape `shape` The generated array. rows : array of shape (n_clusters, X.shape[0],) The indicators for cluster membership of each row. cols : array of shape (n_clusters, X.shape[1],) The indicators for cluster membership of each column. References ---------- .. [1] Dhillon, I. S. (2001, August). Co-clustering documents and words using bipartite spectral graph partitioning. In Proceedings of the seventh ACM SIGKDD international conference on Knowledge discovery and data mining (pp. 269-274). ACM. 
See also -------- make_checkerboard """ generator = check_random_state(random_state) n_rows, n_cols = shape consts = generator.uniform(minval, maxval, n_clusters) # row and column clusters of approximately equal sizes row_sizes = generator.multinomial(n_rows, np.repeat(1.0 / n_clusters, n_clusters)) col_sizes = generator.multinomial(n_cols, np.repeat(1.0 / n_clusters, n_clusters)) row_labels = np.hstack(list(np.repeat(val, rep) for val, rep in zip(range(n_clusters), row_sizes))) col_labels = np.hstack(list(np.repeat(val, rep) for val, rep in zip(range(n_clusters), col_sizes))) result = np.zeros(shape, dtype=np.float64) for i in range(n_clusters): selector = np.outer(row_labels == i, col_labels == i) result[selector] += consts[i] if noise > 0: result += generator.normal(scale=noise, size=result.shape) if shuffle: result, row_idx, col_idx = _shuffle(result, random_state) row_labels = row_labels[row_idx] col_labels = col_labels[col_idx] rows = np.vstack(row_labels == c for c in range(n_clusters)) cols = np.vstack(col_labels == c for c in range(n_clusters)) return result, rows, cols def make_checkerboard(shape, n_clusters, noise=0.0, minval=10, maxval=100, shuffle=True, random_state=None): """Generate an array with block checkerboard structure for biclustering. Parameters ---------- shape : iterable (n_rows, n_cols) The shape of the result. n_clusters : integer or iterable (n_row_clusters, n_column_clusters) The number of row and column clusters. noise : float, optional (default=0.0) The standard deviation of the gaussian noise. minval : int, optional (default=10) Minimum value of a bicluster. maxval : int, optional (default=100) Maximum value of a bicluster. shuffle : boolean, optional (default=True) Shuffle the samples. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape `shape` The generated array. rows : array of shape (n_clusters, X.shape[0],) The indicators for cluster membership of each row. cols : array of shape (n_clusters, X.shape[1],) The indicators for cluster membership of each column. References ---------- .. [1] Kluger, Y., Basri, R., Chang, J. T., & Gerstein, M. (2003). Spectral biclustering of microarray data: coclustering genes and conditions. Genome research, 13(4), 703-716. 
See also -------- make_biclusters """ generator = check_random_state(random_state) if hasattr(n_clusters, "__len__"): n_row_clusters, n_col_clusters = n_clusters else: n_row_clusters = n_col_clusters = n_clusters # row and column clusters of approximately equal sizes n_rows, n_cols = shape row_sizes = generator.multinomial(n_rows, np.repeat(1.0 / n_row_clusters, n_row_clusters)) col_sizes = generator.multinomial(n_cols, np.repeat(1.0 / n_col_clusters, n_col_clusters)) row_labels = np.hstack(list(np.repeat(val, rep) for val, rep in zip(range(n_row_clusters), row_sizes))) col_labels = np.hstack(list(np.repeat(val, rep) for val, rep in zip(range(n_col_clusters), col_sizes))) result = np.zeros(shape, dtype=np.float64) for i in range(n_row_clusters): for j in range(n_col_clusters): selector = np.outer(row_labels == i, col_labels == j) result[selector] += generator.uniform(minval, maxval) if noise > 0: result += generator.normal(scale=noise, size=result.shape) if shuffle: result, row_idx, col_idx = _shuffle(result, random_state) row_labels = row_labels[row_idx] col_labels = col_labels[col_idx] rows = np.vstack(row_labels == label for label in range(n_row_clusters) for _ in range(n_col_clusters)) cols = np.vstack(col_labels == label for _ in range(n_row_clusters) for label in range(n_col_clusters)) return result, rows, cols
bsd-3-clause
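A short usage sketch may help tie the generator docstrings above to concrete calls. This is illustrative only: it assumes the module is importable under its usual public name, sklearn.datasets (as in released scikit-learn versions), and all parameter values below are arbitrary.

import numpy as np
from sklearn.datasets import make_regression, make_friedman1

# Linear problem with 5 informative features out of 20; coef=True also returns
# the ground-truth coefficients of the underlying linear model.
X, y, coef = make_regression(n_samples=200, n_features=20, n_informative=5,
                             noise=0.1, coef=True, random_state=0)
print(X.shape, y.shape, coef.shape)   # (200, 20) (200,) (20,)
print(np.count_nonzero(coef))         # 5 -- only the informative features are non-zero

# Friedman #1: y = 10*sin(pi*x0*x1) + 20*(x2 - 0.5)**2 + 10*x3 + 5*x4 + noise,
# so only the first five of the ten features influence the target.
X, y = make_friedman1(n_samples=100, n_features=10, noise=1.0, random_state=0)
print(X.shape, y.shape)               # (100, 10) (100,)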
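A similar sketch for the clustering-oriented generators, again assuming the standard sklearn.datasets import path; the shapes and counts in the comments follow directly from the parameters chosen.

import numpy as np
from sklearn.datasets import make_blobs, make_biclusters

# Three isotropic Gaussian blobs in two dimensions; y holds the cluster index.
X, y = make_blobs(n_samples=120, n_features=2, centers=3,
                  cluster_std=0.5, random_state=42)
print(X.shape, np.bincount(y))        # (120, 2) [40 40 40]

# A 30x30 matrix with four constant blocks (block-diagonal before shuffling),
# plus boolean row/column indicators for the planted biclusters.
data, rows, cols = make_biclusters(shape=(30, 30), n_clusters=4,
                                   noise=0.5, shuffle=True, random_state=0)
print(data.shape, rows.shape, cols.shape)   # (30, 30) (4, 30) (4, 30)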
plotly/python-api
packages/python/plotly/plotly/graph_objs/scatterpolargl/hoverlabel/_font.py
2
11245
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType import copy as _copy class Font(_BaseTraceHierarchyType): # class properties # -------------------- _parent_path_str = "scatterpolargl.hoverlabel" _path_str = "scatterpolargl.hoverlabel.font" _valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"} # color # ----- @property def color(self): """ The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray """ return self["color"] @color.setter def color(self, val): self["color"] = val # colorsrc # -------- @property def colorsrc(self): """ Sets the source reference on Chart Studio Cloud for color . The 'colorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["colorsrc"] @colorsrc.setter def colorsrc(self, val): self["colorsrc"] = val # family # ------ @property def family(self): """ HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The Chart Studio Cloud (at https://chart- studio.plotly.com or on-premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". 
The 'family' property is a string and must be specified as: - A non-empty string - A tuple, list, or one-dimensional numpy array of the above Returns ------- str|numpy.ndarray """ return self["family"] @family.setter def family(self, val): self["family"] = val # familysrc # --------- @property def familysrc(self): """ Sets the source reference on Chart Studio Cloud for family . The 'familysrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["familysrc"] @familysrc.setter def familysrc(self, val): self["familysrc"] = val # size # ---- @property def size(self): """ The 'size' property is a number and may be specified as: - An int or float in the interval [1, inf] - A tuple, list, or one-dimensional numpy array of the above Returns ------- int|float|numpy.ndarray """ return self["size"] @size.setter def size(self, val): self["size"] = val # sizesrc # ------- @property def sizesrc(self): """ Sets the source reference on Chart Studio Cloud for size . The 'sizesrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["sizesrc"] @sizesrc.setter def sizesrc(self, val): self["sizesrc"] = val # Self properties description # --------------------------- @property def _prop_descriptions(self): return """\ color colorsrc Sets the source reference on Chart Studio Cloud for color . family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The Chart Studio Cloud (at https://chart-studio.plotly.com or on- premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". familysrc Sets the source reference on Chart Studio Cloud for family . size sizesrc Sets the source reference on Chart Studio Cloud for size . """ def __init__( self, arg=None, color=None, colorsrc=None, family=None, familysrc=None, size=None, sizesrc=None, **kwargs ): """ Construct a new Font object Sets the font used in hover labels. Parameters ---------- arg dict of properties compatible with this constructor or an instance of :class:`plotly.graph_objs.scatterpolargl .hoverlabel.Font` color colorsrc Sets the source reference on Chart Studio Cloud for color . family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The Chart Studio Cloud (at https://chart-studio.plotly.com or on- premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". familysrc Sets the source reference on Chart Studio Cloud for family . size sizesrc Sets the source reference on Chart Studio Cloud for size . 
Returns ------- Font """ super(Font, self).__init__("font") if "_parent" in kwargs: self._parent = kwargs["_parent"] return # Validate arg # ------------ if arg is None: arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError( """\ The first argument to the plotly.graph_objs.scatterpolargl.hoverlabel.Font constructor must be a dict or an instance of :class:`plotly.graph_objs.scatterpolargl.hoverlabel.Font`""" ) # Handle skip_invalid # ------------------- self._skip_invalid = kwargs.pop("skip_invalid", False) self._validate = kwargs.pop("_validate", True) # Populate data dict with properties # ---------------------------------- _v = arg.pop("color", None) _v = color if color is not None else _v if _v is not None: self["color"] = _v _v = arg.pop("colorsrc", None) _v = colorsrc if colorsrc is not None else _v if _v is not None: self["colorsrc"] = _v _v = arg.pop("family", None) _v = family if family is not None else _v if _v is not None: self["family"] = _v _v = arg.pop("familysrc", None) _v = familysrc if familysrc is not None else _v if _v is not None: self["familysrc"] = _v _v = arg.pop("size", None) _v = size if size is not None else _v if _v is not None: self["size"] = _v _v = arg.pop("sizesrc", None) _v = sizesrc if sizesrc is not None else _v if _v is not None: self["sizesrc"] = _v # Process unknown kwargs # ---------------------- self._process_kwargs(**dict(arg, **kwargs)) # Reset skip_invalid # ------------------ self._skip_invalid = False
mit
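The generated Font class above is normally reached through the plotly.graph_objects hierarchy rather than imported from this module path directly. A minimal, illustrative sketch follows; the trace data is made up, and it assumes a plotly build where go.Scatterpolargl and this hoverlabel font path are available.

import plotly.graph_objects as go

# Style the hover-label font of a scatterpolargl trace using the Font object
# documented above (the family and color values come from its accepted lists).
font = go.scatterpolargl.hoverlabel.Font(family="Courier New", size=14,
                                         color="crimson")

fig = go.Figure(
    go.Scatterpolargl(
        r=[1, 2, 3],
        theta=[0, 45, 90],
        mode="markers",
        hoverlabel=dict(font=font),
    )
)
# fig.show()  # requires an environment that can render plotly figures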