repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
meh/servo | tests/wpt/web-platform-tests/dom/nodes/Document-createElement-namespace-tests/generate.py | 226 | 2091 | #!/usr/bin/python
import os
import sys
THIS_NAME = "generate.py"
# Note: these lists must be kept in sync with the lists in
# Document-createElement-namespace.html, and this script must be run whenever
# the lists are updated. (We could keep the lists in a shared JSON file, but
# it seems like too much effort.)
FILES = (
("empty", ""),
("minimal_html", "<!doctype html><title></title>"),
("xhtml", '<html xmlns="http://www.w3.org/1999/xhtml"></html>'),
("svg", '<svg xmlns="http://www.w3.org/2000/svg"></svg>'),
("mathml", '<mathml xmlns="http://www.w3.org/1998/Math/MathML"></mathml>'),
("bare_xhtml", "<html></html>"),
("bare_svg", "<svg></svg>"),
("bare_mathml", "<math></math>"),
("xhtml_ns_removed", """\
<html xmlns="http://www.w3.org/1999/xhtml">
<head><script>
var newRoot = document.createElementNS(null, "html");
document.removeChild(document.documentElement);
document.appendChild(newRoot);
</script></head>
</html>
"""),
("xhtml_ns_changed", """\
<html xmlns="http://www.w3.org/1999/xhtml">
<head><script>
var newRoot = document.createElementNS("http://www.w3.org/2000/svg", "abc");
document.removeChild(document.documentElement);
document.appendChild(newRoot);
</script></head>
</html>
"""),
)
EXTENSIONS = (
"html",
"xhtml",
"xml",
"svg",
# Was not able to get server MIME type working properly :(
#"mml",
)
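# For illustration (hypothetical output, not part of this script): with the
# FILES and EXTENSIONS above, a run produces files such as "empty.html",
# "svg.xhtml", "bare_mathml.svg", ... and a MANIFEST containing one
# "support <name>.<extension>" line per generated file, e.g.:
#
#   support empty.html
#   support empty.xhtml
#   support empty.xml
#   support empty.svg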
def __main__():
if len(sys.argv) > 1:
print "No arguments expected, aborting"
return
if not os.access(THIS_NAME, os.F_OK):
print "Must be run from the directory of " + THIS_NAME + ", aborting"
return
for name in os.listdir("."):
if name == THIS_NAME:
continue
os.remove(name)
manifest = open("MANIFEST", "w")
for name, contents in FILES:
for extension in EXTENSIONS:
f = open(name + "." + extension, "w")
f.write(contents)
f.close()
manifest.write("support " + name + "." + extension + "\n")
manifest.close()
__main__()
| mpl-2.0 |
sbellem/django | django/contrib/contenttypes/models.py | 273 | 7798 | from __future__ import unicode_literals
import warnings
from django.apps import apps
from django.db import models
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class ContentTypeManager(models.Manager):
use_in_migrations = True
# Cache to avoid re-looking up ContentType objects all over the place.
# This cache is shared by all the get_for_* methods.
_cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self.__class__._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
self._add_to_cache(self.db, ct)
return ct
def _get_opts(self, model, for_concrete_model):
if for_concrete_model:
model = model._meta.concrete_model
elif model._deferred:
model = model._meta.proxy_for_model
return model._meta
def _get_from_cache(self, opts):
key = (opts.app_label, opts.model_name)
return self.__class__._cache[self.db][key]
def create(self, **kwargs):
if 'name' in kwargs:
del kwargs['name']
warnings.warn(
"ContentType.name field doesn't exist any longer. Please remove it from your code.",
RemovedInDjango110Warning, stacklevel=2)
return super(ContentTypeManager, self).create(**kwargs)
def get_for_model(self, model, for_concrete_model=True):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = self._get_opts(model, for_concrete_model)
try:
return self._get_from_cache(opts)
except KeyError:
pass
# The ContentType entry was not found in the cache, therefore we
# proceed to load or create it.
try:
try:
# We start with get() and not get_or_create() in order to use
# the db_for_read (see #20401).
ct = self.get(app_label=opts.app_label, model=opts.model_name)
except self.model.DoesNotExist:
# Not found in the database; we proceed to create it. This time we
# use get_or_create to take care of any race conditions.
ct, created = self.get_or_create(
app_label=opts.app_label,
model=opts.model_name,
)
except (OperationalError, ProgrammingError, IntegrityError):
# It's possible to migrate a single app before contenttypes,
# as it's not a required initial dependency (it's contrib!)
# Have a nice error for this.
raise RuntimeError(
"Error creating new content types. Please make sure contenttypes "
"is migrated before trying to migrate apps individually."
)
self._add_to_cache(self.db, ct)
return ct
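# Usage sketch (not part of this module; assumes the standard
# django.contrib.auth User model is installed):
#
#   from django.contrib.auth.models import User
#   ct = ContentType.objects.get_for_model(User)
#   # a second lookup for the same model is served from the cache:
#   assert ct is ContentType.objects.get_for_model(User)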
def get_for_models(self, *models, **kwargs):
"""
Given *models, returns a dictionary mapping {model: content_type}.
"""
for_concrete_models = kwargs.pop('for_concrete_models', True)
# Final results
results = {}
# models that aren't already in the cache
needed_app_labels = set()
needed_models = set()
needed_opts = set()
for model in models:
opts = self._get_opts(model, for_concrete_models)
try:
ct = self._get_from_cache(opts)
except KeyError:
needed_app_labels.add(opts.app_label)
needed_models.add(opts.model_name)
needed_opts.add(opts)
else:
results[model] = ct
if needed_opts:
cts = self.filter(
app_label__in=needed_app_labels,
model__in=needed_models
)
for ct in cts:
model = ct.model_class()
if model._meta in needed_opts:
results[model] = ct
needed_opts.remove(model._meta)
self._add_to_cache(self.db, ct)
for opts in needed_opts:
# These weren't in the cache, or the DB, create them.
ct = self.create(
app_label=opts.app_label,
model=opts.model_name,
)
self._add_to_cache(self.db, ct)
results[ct.model_class()] = ct
return results
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_for_id).
"""
try:
ct = self.__class__._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self.__class__._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
# Note it's possible for ContentType objects to be stale; model_class() will return None.
# Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
key = (ct.app_label, ct.model)
self.__class__._cache.setdefault(using, {})[key] = ct
self.__class__._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
unique_together = (('app_label', 'model'),)
def __str__(self):
return self.name
@property
def name(self):
model = self.model_class()
if not model:
return self.model
return force_text(model._meta.verbose_name)
def model_class(self):
"Returns the Python model class for this type of content."
try:
return apps.get_model(self.app_label, self.model)
except LookupError:
return None
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object type's get() model
method. The ObjectDoesNotExist exception, if raised, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
def get_all_objects_for_this_type(self, **kwargs):
"""
Returns all objects of this type for the keyword arguments given.
"""
return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| bsd-3-clause |
popazerty/dvbapp-gui2 | lib/python/Components/config.py | 6 | 54005 | from enigma import getPrevAsciiCode
from Tools.NumericalTextInput import NumericalTextInput
from Tools.Directories import resolveFilename, SCOPE_CONFIG, fileExists
from Components.Harddisk import harddiskmanager
from copy import copy as copy_copy
from os import path as os_path
from time import localtime, strftime
# ConfigElement, the base class of all ConfigElements.
# it stores:
# value the current value, usefully encoded.
# usually a property which retrieves _value,
# and maybe does some reformatting
# _value the value as it's going to be saved in the configfile,
# though still in non-string form.
# this is the object which is actually worked on.
# default the initial value. If _value is equal to default,
# it will not be stored in the config file
# saved_value is a text representation of _value, stored in the config file
#
# and has (at least) the following methods:
# save() stores _value into saved_value,
# (or stores 'None' if it should not be stored)
# load() loads _value from saved_value, or loads
# the default if saved_value is 'None' (default)
# or invalid.
#
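# A minimal usage sketch of the contract above (hypothetical; ConfigYesNo is
# defined further down in this module):
#
#   c = ConfigYesNo(default=False)   # _value starts at the default
#   c.value = True                   # setValue() stores and notifies
#   c.save()                         # saved_value becomes "true"
#   c.value = False
#   c.cancel()                       # load() restores _value from saved_value
#   assert c.value == True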
class ConfigElement(object):
def __init__(self):
self.extra_args = {}
self.saved_value = None
self.save_forced = False
self.last_value = None
self.save_disabled = False
self.__notifiers = None
self.__notifiers_final = None
self.enabled = True
self.callNotifiersOnSaveAndCancel = False
def getNotifiers(self):
if self.__notifiers is None:
self.__notifiers = [ ]
return self.__notifiers
def setNotifiers(self, val):
self.__notifiers = val
notifiers = property(getNotifiers, setNotifiers)
def getNotifiersFinal(self):
if self.__notifiers_final is None:
self.__notifiers_final = [ ]
return self.__notifiers_final
def setNotifiersFinal(self, val):
self.__notifiers_final = val
notifiers_final = property(getNotifiersFinal, setNotifiersFinal)
# you need to override this to do input validation
def setValue(self, value):
self._value = value
self.changed()
def getValue(self):
return self._value
value = property(getValue, setValue)
# you need to override this if self.value is not a string
def fromstring(self, value):
return value
# you can override this for fancy default handling
def load(self):
sv = self.saved_value
if sv is None:
self.value = self.default
else:
self.value = self.fromstring(sv)
def tostring(self, value):
return str(value)
# you need to override this if str(self.value) doesn't work
def save(self):
if self.save_disabled or (self.value == self.default and not self.save_forced):
self.saved_value = None
else:
self.saved_value = self.tostring(self.value)
if self.callNotifiersOnSaveAndCancel:
self.changed()
def cancel(self):
self.load()
if self.callNotifiersOnSaveAndCancel:
self.changed()
def isChanged(self):
sv = self.saved_value
if sv is None and self.value == self.default:
return False
return self.tostring(self.value) != sv
def changed(self):
if self.__notifiers:
for x in self.notifiers:
try:
if self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
def changedFinal(self):
if self.__notifiers_final:
for x in self.notifiers_final:
try:
if self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
def addNotifier(self, notifier, initial_call = True, immediate_feedback = True, extra_args=None):
if not extra_args: extra_args = []
assert callable(notifier), "notifiers must be callable"
try:
self.extra_args[notifier] = extra_args
except: pass
if immediate_feedback:
self.notifiers.append(notifier)
else:
self.notifiers_final.append(notifier)
# CHECKME:
# do we want to call the notifier
# - at all when adding it? (yes, though optional)
# - when the default is active? (yes)
# - when no value *yet* has been set,
# because no config has ever been read (currently yes)
# (though that's not so easy to detect.
# the entry could just be new.)
if initial_call:
if extra_args:
notifier(self,extra_args)
else:
notifier(self)
def removeNotifier(self, notifier, initial_call = True, immediate_feedback = True):
assert callable(notifier), "notifiers must be callable"
if immediate_feedback:
self.notifiers.remove(notifier)
else:
self.notifiers_final.remove(notifier)
def disableSave(self):
self.save_disabled = True
def __call__(self, selected):
return self.getMulti(selected)
def onSelect(self, session):
pass
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
KEY_LEFT = 0
KEY_RIGHT = 1
KEY_OK = 2
KEY_DELETE = 3
KEY_BACKSPACE = 4
KEY_HOME = 5
KEY_END = 6
KEY_TOGGLEOW = 7
KEY_ASCII = 8
KEY_TIMEOUT = 9
KEY_NUMBERS = range(12, 12+10)
KEY_0 = 12
KEY_9 = 12+9
def getKeyNumber(key):
assert key in KEY_NUMBERS
return key - KEY_0
class choicesList(object): # XXX: we might want a better name for this
LIST_TYPE_LIST = 1
LIST_TYPE_DICT = 2
def __init__(self, choices, type = None):
self.choices = choices
if type is None:
if isinstance(choices, list):
self.type = choicesList.LIST_TYPE_LIST
elif isinstance(choices, dict):
self.type = choicesList.LIST_TYPE_DICT
else:
assert False, "choices must be dict or list!"
else:
self.type = type
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices.keys()
return ret or [""]
def __iter__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices
return iter(ret or [""])
def __len__(self):
return len(self.choices) or 1
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
ret = self.choices[index]
if isinstance(ret, tuple):
ret = ret[0]
return ret
return self.choices.keys()[index]
def index(self, value):
try:
return self.__list__().index(value)
except (ValueError, IndexError):
# occurs e.g. when default is not in list
return 0
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
orig = self.choices[index]
if isinstance(orig, tuple):
self.choices[index] = (value, orig[1])
else:
self.choices[index] = value
else:
key = self.choices.keys()[index]
orig = self.choices[key]
del self.choices[key]
self.choices[value] = orig
def default(self):
choices = self.choices
if not choices:
return ""
if self.type is choicesList.LIST_TYPE_LIST:
default = choices[0]
if isinstance(default, tuple):
default = default[0]
else:
default = choices.keys()[0]
return default
class descriptionList(choicesList): # XXX: we might want a better name for this
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[1] for x in self.choices]
else:
ret = self.choices.values()
return ret or [""]
def __iter__(self):
return iter(self.__list__())
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
for x in self.choices:
if isinstance(x, tuple):
if x[0] == index:
return str(x[1])
elif x == index:
return str(x)
return str(index) # Fallback!
else:
return str(self.choices.get(index, ""))
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
i = self.index(index)
orig = self.choices[i]
if isinstance(orig, tuple):
self.choices[i] = (orig[0], value)
else:
self.choices[i] = value
else:
self.choices[index] = value
#
# ConfigSelection is a "one of.."-type.
# it has the "choices", usually a list, which contains
# (id, desc)-tuples (or just only the ids, in case the id
# will be used as description)
#
# all ids MUST be plain strings.
#
class ConfigSelection(ConfigElement):
def __init__(self, choices, default = None):
ConfigElement.__init__(self)
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self._descr = None
self.default = self._value = self.last_value = default
def setChoices(self, choices, default = None):
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self.default = default
if self.value not in self.choices:
self.value = default
def setValue(self, value):
if value in self.choices:
self._value = value
else:
self._value = self.default
self._descr = None
self.changed()
def tostring(self, val):
return val
def getValue(self):
return self._value
def setCurrentText(self, text):
i = self.choices.index(self.value)
self.choices[i] = text
self._descr = self.description[text] = text
self._value = text
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
# GUI
def handleKey(self, key):
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(self.value)
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
def selectNext(self):
nchoices = len(self.choices)
i = self.choices.index(self.value)
self.value = self.choices[(i + 1) % nchoices]
def getText(self):
if self._descr is not None:
return self._descr
descr = self._descr = self.description[self.value]
if descr:
return _(descr)
return descr
def getMulti(self, selected):
if self._descr is not None:
descr = self._descr
else:
descr = self._descr = self.description[self.value]
if descr:
return "text", _(descr)
return "text", descr
# HTML
def getHTML(self, id):
res = ""
for v in self.choices:
descr = self.description[v]
if self.value == v:
checked = 'checked="checked" '
else:
checked = ''
res += '<input type="radio" name="' + id + '" ' + checked + 'value="' + v + '">' + descr + "</input></br>\n"
return res
def unsafeAssign(self, value):
# setValue does check if value is in choices. This is safe enough.
self.value = value
description = property(lambda self: descriptionList(self.choices.choices, self.choices.type))
# a binary decision.
#
# several customized versions exist for different
# descriptions.
#
boolean_descriptions = {False: _("false"), True: _("true")}
class ConfigBoolean(ConfigElement):
def __init__(self, default = False, descriptions = boolean_descriptions):
ConfigElement.__init__(self)
self.descriptions = descriptions
self.value = self.last_value = self.default = default
def handleKey(self, key):
if key in (KEY_LEFT, KEY_RIGHT):
self.value = not self.value
elif key == KEY_HOME:
self.value = False
elif key == KEY_END:
self.value = True
def getText(self):
descr = self.descriptions[self.value]
if descr:
return _(descr)
return descr
def getMulti(self, selected):
descr = self.descriptions[self.value]
if descr:
return "text", _(descr)
return "text", descr
def tostring(self, value):
if not value:
return "false"
else:
return "true"
def fromstring(self, val):
if val == "true":
return True
else:
return False
def getHTML(self, id):
if self.value:
checked = ' checked="checked"'
else:
checked = ''
return '<input type="checkbox" name="' + id + '" value="1" ' + checked + " />"
# this is FLAWED. and must be fixed.
def unsafeAssign(self, value):
if value == "1":
self.value = True
else:
self.value = False
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
yes_no_descriptions = {False: _("no"), True: _("yes")}
class ConfigYesNo(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = yes_no_descriptions)
on_off_descriptions = {False: _("off"), True: _("on")}
class ConfigOnOff(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = on_off_descriptions)
enable_disable_descriptions = {False: _("disable"), True: _("enable")}
class ConfigEnableDisable(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = enable_disable_descriptions)
class ConfigDateTime(ConfigElement):
def __init__(self, default, formatstring, increment = 86400):
ConfigElement.__init__(self)
self.increment = increment
self.formatstring = formatstring
self.value = self.last_value = self.default = int(default)
def handleKey(self, key):
if key == KEY_LEFT:
self.value -= self.increment
elif key == KEY_RIGHT:
self.value += self.increment
elif key == KEY_HOME or key == KEY_END:
self.value = self.default
def getText(self):
return strftime(self.formatstring, localtime(self.value))
def getMulti(self, selected):
return "text", strftime(self.formatstring, localtime(self.value))
def fromstring(self, val):
return int(val)
# *THE* mighty config element class
#
# allows you to store/edit a sequence of values.
# can be used for IP-addresses, dates, plain integers, ...
# several helper exist to ease this up a bit.
#
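# for example (a sketch; ConfigIP below wraps exactly this pattern):
#
#   addr = ConfigSequence(seperator = ".", limits = [(0,255)]*4,
#                         default = [192,168,1,1])
#   addr.tostring(addr.value)   # -> "192.168.1.1"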
class ConfigSequence(ConfigElement):
def __init__(self, seperator, limits, default, censor_char = ""):
ConfigElement.__init__(self)
assert isinstance(limits, list) and len(limits[0]) == 2, "limits must be [(min, max),...]-tuple-list"
assert censor_char == "" or len(censor_char) == 1, "censor char must be a single char (or \"\")"
#assert isinstance(default, list), "default must be a list"
#assert isinstance(default[0], int), "list must contain numbers"
#assert len(default) == len(limits), "length must match"
self.marked_pos = 0
self.seperator = seperator
self.limits = limits
self.censor_char = censor_char
self.last_value = self.default = default
self.value = copy_copy(default)
self.endNotifier = None
def validate(self):
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
if self._value[num] < self.limits[num][0]:
self._value[num] = self.limits[num][0]
if self._value[num] > self.limits[num][1]:
self._value[num] = self.limits[num][1]
num += 1
if self.marked_pos >= max_pos:
if self.endNotifier:
for x in self.endNotifier:
x(self)
self.marked_pos = max_pos - 1
if self.marked_pos < 0:
self.marked_pos = 0
def validatePos(self):
if self.marked_pos < 0:
self.marked_pos = 0
total_len = sum([len(str(x[1])) for x in self.limits])
if self.marked_pos >= total_len:
self.marked_pos = total_len - 1
def addEndNotifier(self, notifier):
if self.endNotifier is None:
self.endNotifier = []
self.endNotifier.append(notifier)
def handleKey(self, key):
if key == KEY_LEFT:
self.marked_pos -= 1
self.validatePos()
elif key == KEY_RIGHT:
self.marked_pos += 1
self.validatePos()
elif key == KEY_HOME:
self.marked_pos = 0
self.validatePos()
elif key == KEY_END:
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
num += 1
self.marked_pos = max_pos - 1
self.validatePos()
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
block_len = [len(str(x[1])) for x in self.limits]
total_len = sum(block_len)
pos = 0
blocknumber = 0
block_len_total = [0, ]
for x in block_len:
pos += block_len[blocknumber]
block_len_total.append(pos)
if pos - 1 >= self.marked_pos:
pass
else:
blocknumber += 1
# length of numberblock
number_len = len(str(self.limits[blocknumber][1]))
# position in the block
posinblock = self.marked_pos - block_len_total[blocknumber]
oldvalue = self._value[blocknumber]
olddec = oldvalue % 10 ** (number_len - posinblock) - (oldvalue % 10 ** (number_len - posinblock - 1))
newvalue = oldvalue - olddec + (10 ** (number_len - posinblock - 1) * number)
self._value[blocknumber] = newvalue
self.marked_pos += 1
self.validate()
self.changed()
def genText(self):
value = ""
mPos = self.marked_pos
num = 0
for i in self._value:
if value: #fixme: no leading separator possible
value += self.seperator
if mPos >= len(value) - 1:
mPos += 1
if self.censor_char == "":
value += ("%0" + str(len(str(self.limits[num][1]))) + "d") % i
else:
value += (self.censor_char * len(str(self.limits[num][1])))
num += 1
return value, mPos
def getText(self):
(value, mPos) = self.genText()
return value
def getMulti(self, selected):
(value, mPos) = self.genText()
# only mark cursor when we are selected
# (this code is heavily ink optimized!)
if self.enabled:
return "mtext"[1-selected:], value, [mPos]
else:
return "text", value
def tostring(self, val):
return self.seperator.join([self.saveSingle(x) for x in val])
def saveSingle(self, v):
return str(v)
def fromstring(self, value):
return [int(x) for x in value.split(self.seperator)]
def onDeselect(self, session):
if self.last_value != self._value:
self.changedFinal()
self.last_value = copy_copy(self._value)
ip_limits = [(0,255),(0,255),(0,255),(0,255)]
class ConfigIP(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = ip_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
mac_limits = [(1,255),(1,255),(1,255),(1,255),(1,255),(1,255)]
class ConfigMAC(ConfigSequence):
def __init__(self, default):
ConfigSequence.__init__(self, seperator = ":", limits = mac_limits, default = default)
class ConfigMacText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = 17
self.visible_width = visible_width
self.offset = 0
self.overwrite = True
self.help_window = None
self.value = self.last_value = self.default = default
self.useableChars = '0123456789ABCDEF'
def validateMarker(self):
textlen = len(self.text)
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
elif self.marked_pos < 0:
self.marked_pos = 0
def insertChar(self, ch, pos, owr):
if self.text[pos] == ':':
pos += 1
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def handleKey(self, key):
if key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
if self.text[self.marked_pos-1] == ':':
self.marked_pos -= 2
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
if self.marked_pos < (len(self.text)-1):
if self.text[self.marked_pos+1] == ':':
self.marked_pos += 2
else:
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
if self.text[self.marked_pos] == ':':
self.marked_pos += 1
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPosition(ConfigSequence):
def __init__(self, default, args):
ConfigSequence.__init__(self, seperator = ",", limits = [(0,args[0]),(0,args[1]),(0,args[2]),(0,args[3])], default = default)
clock_limits = [(0,23),(0,59)]
class ConfigClock(ConfigSequence):
def __init__(self, default):
t = localtime(default)
ConfigSequence.__init__(self, seperator = ":", limits = clock_limits, default = [t.tm_hour, t.tm_min])
def increment(self):
# Check if Minutes maxed out
if self._value[1] == 59:
# Increment Hour, reset Minutes
if self._value[0] < 23:
self._value[0] += 1
else:
self._value[0] = 0
self._value[1] = 0
else:
# Increment Minutes
self._value[1] += 1
# Trigger change
self.changed()
def decrement(self):
# Check if Minutes is minimum
if self._value[1] == 0:
# Decrement Hour, set Minutes to 59
if self._value[0] > 0:
self._value[0] -= 1
else:
self._value[0] = 23
self._value[1] = 59
else:
# Decrement Minutes
self._value[1] -= 1
# Trigger change
self.changed()
integer_limits = (0, 9999999999)
class ConfigInteger(ConfigSequence):
def __init__(self, default, limits = integer_limits):
ConfigSequence.__init__(self, seperator = ":", limits = [limits], default = default)
# you need to override this to do input validation
def setValue(self, value):
self._value = [value]
self.changed()
def getValue(self):
return self._value[0]
value = property(getValue, setValue)
def fromstring(self, value):
return int(value)
def tostring(self, value):
return str(value)
class ConfigPIN(ConfigInteger):
def __init__(self, default, len = 4, censor = ""):
assert isinstance(default, int), "ConfigPIN default must be an integer"
if default == -1:
default = "aaaa"
ConfigSequence.__init__(self, seperator = ":", limits = [(0, (10**len)-1)], censor_char = censor, default = default)
self.len = len
def getLength(self):
return self.len
class ConfigFloat(ConfigSequence):
def __init__(self, default, limits):
ConfigSequence.__init__(self, seperator = ".", limits = limits, default = default)
def getFloat(self):
return float(self.value[1] / float(self.limits[1][1] + 1) + self.value[0])
float = property(getFloat)
# an editable text...
class ConfigText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", fixed_size = True, visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = fixed_size
self.visible_width = visible_width
self.offset = 0
self.overwrite = fixed_size
self.help_window = None
self.value = self.last_value = self.default = default
def validateMarker(self):
textlen = len(self.text)
if self.fixed_size:
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
else:
if self.marked_pos > textlen:
self.marked_pos = textlen
if self.marked_pos < 0:
self.marked_pos = 0
if self.visible_width:
if self.marked_pos < self.offset:
self.offset = self.marked_pos
if self.marked_pos >= self.offset + self.visible_width:
if self.marked_pos == textlen:
self.offset = self.marked_pos - self.visible_width
else:
self.offset = self.marked_pos - self.visible_width + 1
if self.offset > 0 and self.offset + self.visible_width > textlen:
self.offset = max(0, textlen - self.visible_width)
def insertChar(self, ch, pos, owr):
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def deleteChar(self, pos):
if not self.fixed_size:
self.text = self.text[0:pos] + self.text[pos + 1:]
elif self.overwrite:
self.text = self.text[0:pos] + " " + self.text[pos + 1:]
else:
self.text = self.text[0:pos] + self.text[pos + 1:] + " "
def deleteAllChars(self):
if self.fixed_size:
self.text = " " * len(self.text)
else:
self.text = ""
self.marked_pos = 0
def handleKey(self, key):
# this will not change anything on the value itself,
# so we can handle it here in the GUI element
if key == KEY_DELETE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
else:
self.deleteChar(self.marked_pos)
if self.fixed_size and self.overwrite:
self.marked_pos += 1
elif key == KEY_BACKSPACE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
elif self.marked_pos > 0:
self.deleteChar(self.marked_pos-1)
if not self.fixed_size and self.offset > 0:
self.offset -= 1
self.marked_pos -= 1
elif key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
self.marked_pos += 1
elif key == KEY_HOME:
self.timeout()
self.allmarked = False
self.marked_pos = 0
elif key == KEY_END:
self.timeout()
self.allmarked = False
self.marked_pos = len(self.text)
elif key == KEY_TOGGLEOW:
self.timeout()
self.overwrite = not self.overwrite
elif key == KEY_ASCII:
self.timeout()
newChar = unichr(getPrevAsciiCode())
if not self.useableChars or newChar in self.useableChars:
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPassword(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False, censor = "*"):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
self.censor_char = censor
self.hidden = True
def getMulti(self, selected):
mtext, text, mark = ConfigText.getMulti(self, selected)
if self.hidden:
text = len(text) * self.censor_char
return mtext, text, mark
def onSelect(self, session):
ConfigText.onSelect(self, session)
self.hidden = False
def onDeselect(self, session):
ConfigText.onDeselect(self, session)
self.hidden = True
# lets the user select between [min, min+stepwidth, min+(stepwidth*2)..., maxval] with maxval <= max depending
# on the stepwidth
# min, max, stepwidth, default are int values
# wraparound: pressing RIGHT key at max value brings you to min value and vice versa if set to True
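# for example (a sketch using the class below):
#
#   vol = ConfigSelectionNumber(min = 0, max = 100, stepwidth = 5, default = 50)
#   vol.value                  # -> 50 (an int, stored internally as "50")
#   vol.handleKey(KEY_RIGHT)
#   vol.value                  # -> 55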
class ConfigSelectionNumber(ConfigSelection):
def __init__(self, min, max, stepwidth, default = None, wraparound = False):
self.wraparound = wraparound
if default is None:
default = min
default = str(default)
choices = []
step = min
while step <= max:
choices.append(str(step))
step += stepwidth
ConfigSelection.__init__(self, choices, default)
def getValue(self):
return int(ConfigSelection.getValue(self))
def setValue(self, val):
ConfigSelection.setValue(self, str(val))
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
def handleKey(self, key):
if not self.wraparound:
if key == KEY_RIGHT:
if len(self.choices) == (self.choices.index(str(self.value)) + 1):
return
if key == KEY_LEFT:
if self.choices.index(str(self.value)) == 0:
return
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(str(self.value))
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
class ConfigNumber(ConfigText):
def __init__(self, default = 0):
ConfigText.__init__(self, str(default), fixed_size = False)
def getValue(self):
return int(self.text)
def setValue(self, val):
self.text = str(val)
value = property(getValue, setValue)
_value = property(getValue, setValue)
def isChanged(self):
sv = self.saved_value
strv = self.tostring(self.value)
if sv is None and strv == self.default:
return False
return strv != sv
def conform(self):
pos = len(self.text) - self.marked_pos
self.text = self.text.lstrip("0")
if self.text == "":
self.text = "0"
if pos > len(self.text):
self.marked_pos = 0
else:
self.marked_pos = len(self.text) - pos
def handleKey(self, key):
if key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
ascii = getPrevAsciiCode()
if not (48 <= ascii <= 57):
return
else:
ascii = getKeyNumber(key) + 48
newChar = unichr(ascii)
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
else:
ConfigText.handleKey(self, key)
self.conform()
def onSelect(self, session):
self.allmarked = (self.value != "")
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
class ConfigSearchText(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False, search = True)
class ConfigDirectory(ConfigText):
def __init__(self, default="", visible_width=60):
ConfigText.__init__(self, default, fixed_size = True, visible_width = visible_width)
def handleKey(self, key):
pass
def getValue(self):
if self.text == "":
return None
else:
return ConfigText.getValue(self)
def setValue(self, val):
if val is None:
val = ""
ConfigText.setValue(self, val)
def getMulti(self, selected):
if self.text == "":
return "mtext"[1-selected:], _("List of storage devices"), range(0)
else:
return ConfigText.getMulti(self, selected)
def onSelect(self, session):
self.allmarked = (self.value != "")
# a slider.
class ConfigSlider(ConfigElement):
def __init__(self, default = 0, increment = 1, limits = (0, 100)):
ConfigElement.__init__(self)
self.value = self.last_value = self.default = default
self.min = limits[0]
self.max = limits[1]
self.increment = increment
def checkValues(self):
if self.value < self.min:
self.value = self.min
if self.value > self.max:
self.value = self.max
def handleKey(self, key):
if key == KEY_LEFT:
self.value -= self.increment
elif key == KEY_RIGHT:
self.value += self.increment
elif key == KEY_HOME:
self.value = self.min
elif key == KEY_END:
self.value = self.max
else:
return
self.checkValues()
def getText(self):
return "%d / %d" % (self.value, self.max)
def getMulti(self, selected):
self.checkValues()
return "slider", self.value, self.max
def fromstring(self, value):
return int(value)
# a satlist. in fact, it's a ConfigSelection.
class ConfigSatlist(ConfigSelection):
def __init__(self, list, default = None):
if default is not None:
default = str(default)
ConfigSelection.__init__(self, choices = [(str(orbpos), desc) for (orbpos, desc, flags) in list], default = default)
def getOrbitalPosition(self):
if self.value == "":
return None
return int(self.value)
orbital_position = property(getOrbitalPosition)
class ConfigSet(ConfigElement):
def __init__(self, choices, default=None):
if not default: default = []
ConfigElement.__init__(self)
if isinstance(choices, list):
choices.sort()
self.choices = choicesList(choices, choicesList.LIST_TYPE_LIST)
else:
assert False, "ConfigSet choices must be a list!"
if default is None:
default = []
self.pos = -1
default.sort()
self.last_value = self.default = default
self.value = default[:]
def toggleChoice(self, choice):
value = self.value
if choice in value:
value.remove(choice)
else:
value.append(choice)
value.sort()
self.changed()
def handleKey(self, key):
if key in KEY_NUMBERS + [KEY_DELETE, KEY_BACKSPACE]:
if self.pos != -1:
self.toggleChoice(self.choices[self.pos])
elif key == KEY_LEFT:
if self.pos < 0:
self.pos = len(self.choices)-1
else:
self.pos -= 1
elif key == KEY_RIGHT:
if self.pos >= len(self.choices)-1:
self.pos = -1
else:
self.pos += 1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def genString(self, lst):
res = ""
for x in lst:
res += self.description[x]+" "
return res
def getText(self):
return self.genString(self.value)
def getMulti(self, selected):
if not selected or self.pos == -1:
return "text", self.genString(self.value)
else:
tmp = self.value[:]
ch = self.choices[self.pos]
mem = ch in self.value
if not mem:
tmp.append(ch)
tmp.sort()
ind = tmp.index(ch)
val1 = self.genString(tmp[:ind])
val2 = " "+self.genString(tmp[ind+1:])
if mem:
chstr = " "+self.description[ch]+" "
else:
chstr = "("+self.description[ch]+")"
len_val1 = len(val1)
return "mtext", val1+chstr+val2, range(len_val1, len_val1 + len(chstr))
def onDeselect(self, session):
self.pos = -1
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value[:]
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
description = property(lambda self: descriptionList(self.choices.choices, choicesList.LIST_TYPE_LIST))
class ConfigLocations(ConfigElement):
def __init__(self, default=None, visible_width=False):
if not default: default = []
ConfigElement.__init__(self)
self.visible_width = visible_width
self.pos = -1
self.default = default
self.locations = []
self.mountpoints = []
self.value = default[:]
def setValue(self, value):
locations = self.locations
loc = [x[0] for x in locations if x[3]]
add = [x for x in value if not x in loc]
diff = add + [x for x in loc if not x in value]
locations = [x for x in locations if not x[0] in diff] + [[x, self.getMountpoint(x), True, True] for x in add]
#locations.sort(key = lambda x: x[0])
self.locations = locations
self.changed()
def getValue(self):
self.checkChangedMountpoints()
locations = self.locations
for x in locations:
x[3] = x[2]
return [x[0] for x in locations if x[3]]
value = property(getValue, setValue)
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
def load(self):
sv = self.saved_value
if sv is None:
tmp = self.default
else:
tmp = self.fromstring(sv)
locations = [[x, None, False, False] for x in tmp]
self.refreshMountpoints()
for x in locations:
if fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
self.locations = locations
def save(self):
locations = self.locations
if self.save_disabled or not locations:
self.saved_value = None
else:
self.saved_value = self.tostring([x[0] for x in locations])
def isChanged(self):
sv = self.saved_value
locations = self.locations
if sv is None and not locations:
return False
return self.tostring([x[0] for x in locations]) != sv
def addedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = True
elif x[1] is None and fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
def removedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = False
def refreshMountpoints(self):
self.mountpoints = [p.mountpoint for p in harddiskmanager.getMountedPartitions() if p.mountpoint != "/"]
self.mountpoints.sort(key = lambda x: -len(x))
def checkChangedMountpoints(self):
oldmounts = self.mountpoints
self.refreshMountpoints()
newmounts = self.mountpoints
if oldmounts == newmounts:
return
for x in oldmounts:
if not x in newmounts:
self.removedMount(x)
for x in newmounts:
if not x in oldmounts:
self.addedMount(x)
def getMountpoint(self, file):
file = os_path.realpath(file)+"/"
for m in self.mountpoints:
if file.startswith(m):
return m
return None
def handleKey(self, key):
if key == KEY_LEFT:
self.pos -= 1
if self.pos < -1:
self.pos = len(self.value)-1
elif key == KEY_RIGHT:
self.pos += 1
if self.pos >= len(self.value):
self.pos = -1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def getText(self):
return " ".join(self.value)
def getMulti(self, selected):
if not selected:
valstr = " ".join(self.value)
if self.visible_width and len(valstr) > self.visible_width:
return "text", valstr[0:self.visible_width]
else:
return "text", valstr
else:
i = 0
valstr = ""
ind1 = 0
ind2 = 0
for val in self.value:
if i == self.pos:
ind1 = len(valstr)
valstr += str(val)+" "
if i == self.pos:
ind2 = len(valstr)
i += 1
if self.visible_width and len(valstr) > self.visible_width:
if ind1+1 < self.visible_width/2:
off = 0
else:
off = min(ind1+1-self.visible_width/2, len(valstr)-self.visible_width)
return "mtext", valstr[off:off+self.visible_width], range(ind1-off,ind2-off)
else:
return "mtext", valstr, range(ind1,ind2)
def onDeselect(self, session):
self.pos = -1
# nothing.
class ConfigNothing(ConfigSelection):
def __init__(self):
ConfigSelection.__init__(self, choices = [("","")])
# until here, 'saved_value' always had to be a *string*.
# now, in ConfigSubsection, and only there, saved_value
# is a dict, essentially forming a tree.
#
# config.foo.bar=True
# config.foobar=False
#
# turns into:
# config.saved_value == {"foo": {"bar": "True"}, "foobar": "False"}
#
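# for example (a sketch; note that tostring() of a ConfigBoolean yields
# "true"/"false", so the stored strings are lowercase):
#
#   config.foo = ConfigSubsection()
#   config.foo.bar = ConfigYesNo(default = False)
#   config.foo.bar.value = True
#   config.save()
#   config.saved_value   # -> contains {"foo": {"bar": "true"}}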
class ConfigSubsectionContent(object):
pass
# we store a backup of the loaded configuration
# data in self.stored_values, to be able to deploy
# them when a new config element will be added,
# so non-default values are instantly available
# A list, for example:
# config.dipswitches = ConfigSubList()
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
class ConfigSubList(list, object):
def __init__(self):
list.__init__(self)
self.stored_values = {}
def save(self):
for x in self:
x.save()
def load(self):
for x in self:
x.load()
def getSavedValue(self):
res = { }
for i, val in enumerate(self):
sv = val.saved_value
if sv is not None:
res[str(i)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.stored_values.items():
if int(key) < len(self):
self[int(key)].saved_value = val
saved_value = property(getSavedValue, setSavedValue)
def append(self, item):
i = str(len(self))
list.append(self, item)
if i in self.stored_values:
item.saved_value = self.stored_values[i]
item.load()
def dict(self):
return dict([(str(index), value) for index, value in enumerate(self)])
# same as ConfigSubList, just as a dictionary.
# care must be taken that the 'key' has a proper
# str() method, because it will be used in the config
# file.
class ConfigSubDict(dict, object):
def __init__(self):
dict.__init__(self)
self.stored_values = {}
def save(self):
for x in self.values():
x.save()
def load(self):
for x in self.values():
x.load()
def getSavedValue(self):
res = {}
for (key, val) in self.items():
sv = val.saved_value
if sv is not None:
res[str(key)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.items():
if str(key) in self.stored_values:
val.saved_value = self.stored_values[str(key)]
saved_value = property(getSavedValue, setSavedValue)
def __setitem__(self, key, item):
dict.__setitem__(self, key, item)
if str(key) in self.stored_values:
item.saved_value = self.stored_values[str(key)]
item.load()
def dict(self):
return self
# Like the classes above, just with a more "native"
# syntax.
#
# some evil stuff must be done to allow instant
# loading of added elements. this is why this class
# is so complex.
#
# we need the 'content' because we overwrite
# __setattr__.
# If you don't understand this, try adding
# __setattr__ to a usual existing class and you will.
class ConfigSubsection(object):
def __init__(self):
self.__dict__["content"] = ConfigSubsectionContent()
self.content.items = { }
self.content.stored_values = { }
def __setattr__(self, name, value):
if name == "saved_value":
return self.setSavedValue(value)
assert isinstance(value, (ConfigSubsection, ConfigElement, ConfigSubList, ConfigSubDict)), "ConfigSubsections can only store ConfigSubsections, ConfigSubLists, ConfigSubDicts or ConfigElements"
content = self.content
content.items[name] = value
x = content.stored_values.get(name, None)
if x is not None:
#print "ok, now we have a new item,", name, "and have the following value for it:", x
value.saved_value = x
value.load()
def __getattr__(self, name):
return self.content.items[name]
def getSavedValue(self):
res = self.content.stored_values
for (key, val) in self.content.items.items():
sv = val.saved_value
if sv is not None:
res[key] = sv
elif key in res:
del res[key]
return res
def setSavedValue(self, values):
values = dict(values)
self.content.stored_values = values
for (key, val) in self.content.items.items():
value = values.get(key, None)
if value is not None:
val.saved_value = value
saved_value = property(getSavedValue, setSavedValue)
def save(self):
for x in self.content.items.values():
x.save()
def load(self):
for x in self.content.items.values():
x.load()
def dict(self):
return self.content.items
# the root config object, which also can "pickle" (=serialize)
# down the whole config tree.
#
# we try to keep non-existing config entries, to apply them whenever
# a new config entry is added to a subsection
# also, non-existing config entries will be saved, so they won't be
# lost when a config entry disappears.
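# the pickled form written to the settings file is one "name=value" line per
# entry, for example (hypothetical):
#
#   config.foo.bar=true
#   config.foobar=false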
class Config(ConfigSubsection):
def __init__(self):
ConfigSubsection.__init__(self)
def pickle_this(self, prefix, topickle, result):
for (key, val) in topickle.items():
name = '.'.join((prefix, key))
if isinstance(val, dict):
self.pickle_this(name, val, result)
elif isinstance(val, tuple):
result += [name, '=', str(val[0]), '\n']
else:
result += [name, '=', str(val), '\n']
def pickle(self):
result = []
self.pickle_this("config", self.saved_value, result)
return ''.join(result)
def unpickle(self, lines, base_file=True):
tree = { }
configbase = tree.setdefault("config", {})
for l in lines:
if not l or l[0] == '#':
continue
result = l.split('=', 1)
if len(result) != 2:
continue
(name, val) = result
val = val.strip()
names = name.split('.')
base = configbase
for n in names[1:-1]:
base = base.setdefault(n, {})
base[names[-1]] = val
if not base_file: # not the initial config file..
#update config.x.y.value when exist
try:
configEntry = eval(name)
if configEntry is not None:
configEntry.value = val
except (SyntaxError, KeyError):
pass
# we inherit from ConfigSubsection, so ...
#object.__setattr__(self, "saved_value", tree["config"])
if "config" in tree:
self.setSavedValue(tree["config"])
def saveToFile(self, filename):
text = self.pickle()
try:
import os
f = open(filename + ".writing", "w")
f.write(text)
f.flush()
os.fsync(f.fileno())
f.close()
os.rename(filename + ".writing", filename)
except IOError:
print "Config: Couldn't write %s" % filename
def loadFromFile(self, filename, base_file=True):
f = open(filename, "r")
self.unpickle(f.readlines(), base_file)
f.close()
config = Config()
config.misc = ConfigSubsection()
class ConfigFile:
def __init__(self):
pass
CONFIG_FILE = resolveFilename(SCOPE_CONFIG, "settings")
def load(self):
try:
config.loadFromFile(self.CONFIG_FILE, True)
except IOError, e:
print "unable to load config (%s), assuming defaults..." % str(e)
def save(self):
# config.save()
config.saveToFile(self.CONFIG_FILE)
def __resolveValue(self, pickles, cmap):
key = pickles[0]
if cmap.has_key(key):
if len(pickles) > 1:
return self.__resolveValue(pickles[1:], cmap[key].dict())
else:
return str(cmap[key].value)
return None
def getResolvedKey(self, key):
names = key.split('.')
if len(names) > 1:
if names[0] == "config":
ret=self.__resolveValue(names[1:], config.content.items)
if ret and len(ret):
return ret
print "getResolvedKey", key, "failed !! (Typo??)"
return ""
def NoSave(element):
element.disableSave()
return element
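# usage sketch (hypothetical): wrap an element whose value should only live
# at runtime and never be written to the settings file:
#
#   config.misc.standbyCounter = NoSave(ConfigInteger(default = 0))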
configfile = ConfigFile()
configfile.load()
def getConfigListEntry(*args):
assert len(args) > 1, "getConfigListEntry needs a minimum of two arguments (descr, configElement)"
return args
def updateConfigElement(element, newelement):
newelement.value = element.value
return newelement
#def _(x):
# return x
#
#config.bla = ConfigSubsection()
#config.bla.test = ConfigYesNo()
#config.nim = ConfigSubList()
#config.nim.append(ConfigSubsection())
#config.nim[0].bla = ConfigYesNo()
#config.nim.append(ConfigSubsection())
#config.nim[1].bla = ConfigYesNo()
#config.nim[1].blub = ConfigYesNo()
#config.arg = ConfigSubDict()
#config.arg["Hello"] = ConfigYesNo()
#
#config.arg["Hello"].handleKey(KEY_RIGHT)
#config.arg["Hello"].handleKey(KEY_RIGHT)
#
##config.saved_value
#
##configfile.save()
#config.save()
#print config.pickle()
cec_limits = [(0,15),(0,15),(0,15),(0,15)]
class ConfigCECAddress(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = cec_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
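			# A digit that overflows this block's limit: with auto_jump, move
			# the cursor one block right and re-handle the same key there.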
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
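# Minimal usage sketch (hypothetical; assumes the KEY_* constants and the
# Config framework defined above):
#   cec = ConfigCECAddress(default=[1, 0, 0, 0], auto_jump=True)
#   cec.handleKey(KEY_RIGHT)   # move the marked block one step to the right
#   print cec.genText()[0]     # -> "1.0.0.0"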
| gpl-2.0 |
jianjunz/online-judge-solutions | leetcode/1223-graph-connectivity-with-threshold.py | 2 | 1095 | class Solution:
def areConnected(self, n: int, threshold: int, queries: List[List[int]]) -> List[bool]:
cities=[0]*(n+1)
group={}
nextGroupId=1
def union(source, to):
if source==to:
return
for c in group[source]:
cities[c]=to
group[to].extend(group[source])
del group[source]
for base in range(threshold+1, n):
currentGroupId=nextGroupId
nextGroupId+=1
group[currentGroupId]=[]
for member in range(base, n+1, base):
if cities[member]==0:
cities[member]=currentGroupId
group[currentGroupId].append(member)
else:
union(cities[member], currentGroupId)
answer=[False]*len(queries)
for i in range(len(queries)):
u,v=queries[i]
if cities[u]==cities[v] and cities[u]!=0:
answer[i]=True
return answer
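# Hypothetical local harness (not part of the submitted solution): cities u and
# v are connected iff they share a common divisor greater than threshold, which
# the sieve-style loop above models by unioning all multiples of each base.
#   s = Solution()
#   print(s.areConnected(6, 2, [[1, 4], [2, 5], [3, 6]]))  # -> [False, False, True]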
| mit |
nagnath006/Soccer-Analytics | Soccer-Analytics/Lib/encodings/utf_16_be.py | 860 | 1037 | """ Python 'utf-16-be' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
encode = codecs.utf_16_be_encode
def decode(input, errors='strict'):
return codecs.utf_16_be_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.utf_16_be_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
_buffer_decode = codecs.utf_16_be_decode
class StreamWriter(codecs.StreamWriter):
encode = codecs.utf_16_be_encode
class StreamReader(codecs.StreamReader):
decode = codecs.utf_16_be_decode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-16-be',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
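# Usage sketch (normally this codec is reached through the codecs registry by
# name, e.g. codecs.lookup('utf-16-be'), rather than imported directly):
#   data = encode(u'abc')[0]   # utf_16_be_encode returns (bytes, chars consumed)
#   text = decode(data)[0]     # round-trips to u'abc'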
| mpl-2.0 |
fucxy/ESPython | SDK/ESP8266_NONOS_SDK/tools/make_cert.py | 6 | 1411 | import os
class Cert(object):
def __init__(self, name, buff):
self.name = name
self.len = len(buff)
self.buff = buff
pass
def __str__(self):
out_str = ['\0']*32
for i in range(len(self.name)):
out_str[i] = self.name[i]
out_str = "".join(out_str)
out_str += str(chr(self.len & 0xFF))
out_str += str(chr((self.len & 0xFF00) >> 8))
out_str += self.buff
return out_str
pass
def main():
cert_list = []
file_list = os.listdir(os.getcwd())
cert_file_list = []
for _file in file_list:
pos = _file.find(".key_1024")
if pos != -1:
cert_file_list.append(_file[:pos])
pos = _file.find(".cer")
if pos!= -1:
cert_file_list.append(_file[:pos])
for cert_file in cert_file_list:
if cert_file == 'private_key':
with open(cert_file+".key_1024", 'rb') as f:
buff = f.read()
cert_list.append(Cert(cert_file, buff))
if cert_file == 'certificate':
with open(cert_file+".cer", 'rb') as f:
buff = f.read()
cert_list.append(Cert(cert_file, buff))
with open('esp_cert_private_key.bin', 'wb+') as f:
for _cert in cert_list:
f.write("%s" % _cert)
pass
if __name__ == '__main__':
main()
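# Record layout produced by Cert.__str__ above: a 32-byte NUL-padded name, a
# little-endian 16-bit length, then the raw file bytes. A hypothetical reader
# for one record (not part of the SDK tool):
#   with open('esp_cert_private_key.bin', 'rb') as f:
#       name = f.read(32).rstrip('\0')
#       length = ord(f.read(1)) | (ord(f.read(1)) << 8)
#       body = f.read(length)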
| mit |
mskrzypkows/servo | tests/wpt/web-platform-tests/webdriver/user_input/sendkeys_test.py | 141 | 3188 | import os
import sys
import random
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
repo_root = os.path.abspath(os.path.join(__file__, "../../.."))
sys.path.insert(1, os.path.join(repo_root, "tools", "webdriver"))
from webdriver import exceptions
class SendKeysTest(base_test.WebDriverBaseTest):
def setUp(self):
self.driver.get(self.webserver.where_is("user_input/res/text-form.html"))
def test_send_simple_string(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("lorem ipsum")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"lorem ipsum")
def test_send_return(self):
element = self.driver.find_element_by_id("Text1")
returnkey = unichr(int("E006", 16))
element.send_keys([returnkey])
self.assertEquals(u"" + self.driver.get_current_url(), u"" + self.webserver.where_is("user_input/res/text-form-landing.html?e=mc2"))
def test_send_backspace(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("world ")
element.send_keys("wide ")
element.send_keys("web ")
element.send_keys("consortium")
backspace= unichr(int("E003", 16))
for i in range(0, 11):
element.send_keys([backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"world wide web")
def test_send_tab(self):
element1 = self.driver.find_element_by_id("Text1")
element2 = self.driver.find_element_by_id("Text2")
element1.send_keys("typing here")
tab= unichr(int("E004", 16))
element1.send_keys([tab])
output = self.driver.find_element_by_id("output")
tab_pressed = output.get_attribute("checked")
self.assertEquals(tab_pressed, u"true")
def test_send_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("low ")
shift= unichr(int("E008", 16))
element.send_keys([shift , "u", "p", shift])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"low UP")
def test_send_arrow_keys(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("internet")
backspace= unichr(int("E003", 16))
left= unichr(int("E012", 16))
right= unichr(int("E014", 16))
for i in range(0, 4):
element.send_keys([left])
element.send_keys([backspace])
element.send_keys([right])
element.send_keys("a")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"intranet")
def test_select_text_with_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("WebDriver")
backspace= unichr(int("E003", 16))
shift= unichr(int("E008", 16))
left= unichr(int("E012", 16))
element.send_keys([shift, left, left, left, left, left, left, backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"Web")
if __name__ == "__main__":
unittest.main()
| mpl-2.0 |
ktosiek/spacewalk | proxy/proxy/rhnConstants.py | 3 | 1463 | #!/usr/bin/python
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
##
# rhnDefines.py - Constants used throughout the Spacewalk Proxy.
#-----------------------------------------------------------------------------
#
"""Constants used by the Spacewalk Proxy"""
# HTTP Headers
HEADER_ACTUAL_URI = 'X-RHN-ActualURI'
HEADER_EFFECTIVE_URI = 'X-RHN-EffectiveURI'
HEADER_CHECKSUM = 'X-RHN-Checksum'
HEADER_LOCATION = 'Location'
HEADER_CONTENT_LENGTH = 'Content-Length'
HEADER_RHN_REDIRECT = 'X-RHN-Redirect'
HEADER_RHN_ORIG_LOC = 'X-RHN-OriginalLocation'
# HTTP Schemes
SCHEME_HTTP = 'http'
SCHEME_HTTPS = 'https'
# These help us match URIs when kickstarting through a Proxy.
URI_PREFIX_KS = '/ty/'
URI_PREFIX_KS_CHECKSUM = '/ty-cksm/'
# Component Constants
COMPONENT_BROKER = 'proxy.broker'
COMPONENT_REDIRECT = 'proxy.redirect'
| gpl-2.0 |
makermade/arm_android-21_arm-linux-androideabi-4.8 | lib/python2.7/nntplib.py | 157 | 21135 | """An NNTP client class based on RFC 977: Network News Transfer Protocol.
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print 'Group', name, 'has', count, 'articles, range', first, 'to', last
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', first + '-' + last)
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'r') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Imports
import re
import socket
__all__ = ["NNTP","NNTPReplyError","NNTPTemporaryError",
"NNTPPermanentError","NNTPProtocolError","NNTPDataError",
"error_reply","error_temp","error_perm","error_proto",
"error_data",]
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
Exception.__init__(self, *args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
"""Error in response data"""
pass
# for backwards compatibility
error_reply = NNTPReplyError
error_temp = NNTPTemporaryError
error_perm = NNTPPermanentError
error_proto = NNTPProtocolError
error_data = NNTPDataError
# Standard port used by NNTP servers
NNTP_PORT = 119
# Response numbers that are followed by additional text (e.g. article)
LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class NNTP:
def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=True):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.create_connection((host, port))
self.file = self.sock.makefile('rb')
self.debugging = 0
self.welcome = self.getresp()
# 'mode reader' is sometimes necessary to enable 'reader' mode.
# However, the order in which 'mode reader' and 'authinfo' need to
# arrive differs between some NNTP servers. Try to send
# 'mode reader', and if it fails with an authorization failed
# error, try again after sending authinfo.
readermode_afterauth = 0
if readermode:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
except NNTPTemporaryError, e:
if user and e.response[:3] == '480':
# Need authorization before 'mode reader'
readermode_afterauth = 1
else:
raise
# If no login/password was specified, try to get them from ~/.netrc
        # Presume that if .netrc has an entry, NNRP authentication is required.
try:
if usenetrc and not user:
import netrc
credentials = netrc.netrc()
auth = credentials.authenticators(host)
if auth:
user = auth[0]
password = auth[2]
except IOError:
pass
# Perform NNRP authentication if needed.
if user:
resp = self.shortcmd('authinfo user '+user)
if resp[:3] == '381':
if not password:
raise NNTPReplyError(resp)
else:
resp = self.shortcmd(
'authinfo pass '+password)
if resp[:3] != '281':
raise NNTPPermanentError(resp)
if readermode_afterauth:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
# Get the welcome message from the server
# (this is read and squirreled away by __init__()).
# If the response code is 200, posting is allowed;
    # if it is 201, posting is not allowed
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
If the response code is 200, posting is allowed;
        if it is 201, posting is not allowed."""
if self.debugging: print '*welcome*', repr(self.welcome)
return self.welcome
def set_debuglevel(self, level):
"""Set the debugging level. Argument 'level' means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF"""
self.debugging = level
debug = set_debuglevel
def putline(self, line):
"""Internal: send one line to the server, appending CRLF."""
line = line + CRLF
if self.debugging > 1: print '*put*', repr(line)
self.sock.sendall(line)
def putcmd(self, line):
"""Internal: send one command to the server (through putline())."""
if self.debugging: print '*cmd*', repr(line)
self.putline(line)
def getline(self):
"""Internal: return one line from the server, stripping CRLF.
Raise EOFError if the connection is closed."""
line = self.file.readline()
if self.debugging > 1:
print '*get*', repr(line)
if not line: raise EOFError
if line[-2:] == CRLF: line = line[:-2]
elif line[-1:] in CRLF: line = line[:-1]
return line
def getresp(self):
"""Internal: get a response from the server.
Raise various errors if the response indicates an error."""
resp = self.getline()
if self.debugging: print '*resp*', repr(resp)
c = resp[:1]
if c == '4':
raise NNTPTemporaryError(resp)
if c == '5':
raise NNTPPermanentError(resp)
if c not in '123':
raise NNTPProtocolError(resp)
return resp
def getlongresp(self, file=None):
"""Internal: get a response plus following text from the server.
Raise various errors if the response indicates an error."""
openedFile = None
try:
# If a string was passed then open a file with that name
if isinstance(file, str):
openedFile = file = open(file, "w")
resp = self.getresp()
if resp[:3] not in LONGRESP:
raise NNTPReplyError(resp)
list = []
while 1:
line = self.getline()
if line == '.':
break
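                # Undo RFC 977 dot-stuffing: a data line starting with '..' carries a literal leading '.'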
if line[:2] == '..':
line = line[1:]
if file:
file.write(line + "\n")
else:
list.append(line)
finally:
# If this method created the file, then it must close it
if openedFile:
openedFile.close()
return resp, list
def shortcmd(self, line):
"""Internal: send a command and get the response."""
self.putcmd(line)
return self.getresp()
def longcmd(self, line, file=None):
"""Internal: send a command and get the response plus following text."""
self.putcmd(line)
return self.getlongresp(file)
def newgroups(self, date, time, file=None):
"""Process a NEWGROUPS command. Arguments:
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of newsgroup names"""
return self.longcmd('NEWGROUPS ' + date + ' ' + time, file)
def newnews(self, group, date, time, file=None):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of message ids"""
cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
return self.longcmd(cmd, file)
def list(self, file=None):
"""Process a LIST command. Return:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)"""
resp, list = self.longcmd('LIST', file)
for i in range(len(list)):
# Parse lines into "group last first flag"
list[i] = tuple(list[i].split())
return resp, list
def description(self, group):
"""Get a description for a single group. If more than one
group matches ('group' is a pattern), return the first. If no
group matches, return an empty string.
This elides the response code from the server, since it can
only be '215' or '285' (for xgtitle) anyway. If the response
code is needed, use the 'descriptions' method.
NOTE: This neither checks for a wildcard in 'group' nor does
it check whether the group actually exists."""
resp, lines = self.descriptions(group)
if len(lines) == 0:
return ""
else:
return lines[0][1]
def descriptions(self, group_pattern):
"""Get descriptions for a range of groups."""
line_pat = re.compile("^(?P<group>[^ \t]+)[ \t]+(.*)$")
# Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
resp, raw_lines = self.longcmd('LIST NEWSGROUPS ' + group_pattern)
if resp[:3] != "215":
# Now the deprecated XGTITLE. This either raises an error
# or succeeds with the same output structure as LIST
# NEWSGROUPS.
resp, raw_lines = self.longcmd('XGTITLE ' + group_pattern)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles (string)
- first: first article number (string)
- last: last article number (string)
- name: the group name"""
resp = self.shortcmd('GROUP ' + name)
if resp[:3] != '211':
raise NNTPReplyError(resp)
words = resp.split()
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = words[4].lower()
return resp, count, first, last, name
def help(self, file=None):
"""Process a HELP command. Returns:
- resp: server response if successful
- list: list of strings"""
return self.longcmd('HELP',file)
def statparse(self, resp):
"""Internal: parse the response of a STAT, NEXT or LAST command."""
if resp[:2] != '22':
raise NNTPReplyError(resp)
words = resp.split()
nr = 0
id = ''
n = len(words)
if n > 1:
nr = words[1]
if n > 2:
id = words[2]
return resp, nr, id
def statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self.shortcmd(line)
return self.statparse(resp)
def stat(self, id):
"""Process a STAT command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: the article number
- id: the message id"""
return self.statcmd('STAT ' + id)
def next(self):
"""Process a NEXT command. No arguments. Return as for STAT."""
return self.statcmd('NEXT')
def last(self):
"""Process a LAST command. No arguments. Return as for STAT."""
return self.statcmd('LAST')
def artcmd(self, line, file=None):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, list = self.longcmd(line, file)
resp, nr, id = self.statparse(resp)
return resp, nr, id, list
def head(self, id):
"""Process a HEAD command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's header"""
return self.artcmd('HEAD ' + id)
def body(self, id, file=None):
"""Process a BODY command. Argument:
- id: article number or message id
- file: Filename string or file object to store the article in
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's body or an empty list
if file was used"""
return self.artcmd('BODY ' + id, file)
def article(self, id):
"""Process an ARTICLE command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article"""
return self.artcmd('ARTICLE ' + id)
def slave(self):
"""Process a SLAVE command. Returns:
- resp: server response if successful"""
return self.shortcmd('SLAVE')
def xhdr(self, hdr, str, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
Returns:
- resp: server response if successful
- list: list of (nr, value) strings"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str, file)
for i in range(len(lines)):
line = lines[i]
m = pat.match(line)
if m:
lines[i] = m.group(1, 2)
return resp, lines
def xover(self, start, end, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
Returns:
- resp: server response if successful
- list: list of (art-nr, subject, poster, date,
id, references, size, lines)"""
resp, lines = self.longcmd('XOVER ' + start + '-' + end, file)
xover_lines = []
for line in lines:
elem = line.split("\t")
try:
xover_lines.append((elem[0],
elem[1],
elem[2],
elem[3],
elem[4],
elem[5].split(),
elem[6],
elem[7]))
except IndexError:
raise NNTPDataError(line)
return resp,xover_lines
def xgtitle(self, group, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
resp, raw_lines = self.longcmd('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self,id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article"""
resp = self.shortcmd("XPATH " + id)
if resp[:3] != '223':
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date (self):
"""Process the DATE command. Arguments:
None
Returns:
resp: server response if successful
date: Date suitable for newnews/newgroups commands etc.
time: Time suitable for newnews/newgroups commands etc."""
resp = self.shortcmd("DATE")
if resp[:3] != '111':
raise NNTPReplyError(resp)
elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1][2:8]
time = elem[1][-6:]
if len(date) != 6 or len(time) != 6:
raise NNTPDataError(resp)
return resp, date, time
def post(self, f):
"""Process a POST command. Arguments:
- f: file containing the article
Returns:
- resp: server response if successful"""
resp = self.shortcmd('POST')
# Raises error_??? if posting is not allowed
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def ihave(self, id, f):
"""Process an IHAVE command. Arguments:
- id: message-id of the article
- f: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
resp = self.shortcmd('IHAVE ' + id)
# Raises error_??? if the server already has it
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def quit(self):
"""Process a QUIT command and close the socket. Returns:
- resp: server response if successful"""
resp = self.shortcmd('QUIT')
self.file.close()
self.sock.close()
del self.file, self.sock
return resp
# Test retrieval when run as a script.
# Assumption: if there's a local news server, it's called 'news'.
# Assumption: if user queries a remote news server, it's named
# in the environment variable NNTPSERVER (used by slrn and kin)
# and we want readermode off.
if __name__ == '__main__':
import os
    newshost = os.environ.get("NNTPSERVER") or 'news'
if newshost.find('.') == -1:
mode = 'readermode'
else:
mode = None
s = NNTP(newshost, readermode=mode)
resp, count, first, last, name = s.group('comp.lang.python')
print resp
print 'Group', name, 'has', count, 'articles, range', first, 'to', last
resp, subs = s.xhdr('subject', first + '-' + last)
print resp
for item in subs:
print "%7s %s" % item
resp = s.quit()
print resp
| gpl-2.0 |
datalogics/scons | bin/memoicmp.py | 2 | 2183 | #!/usr/bin/env python
#
# A script to compare the --debug=memoizer output found in
# two different files.
import sys,string
def memoize_output(fname):
mout = {}
lines=filter(lambda words:
len(words) == 5 and
words[1] == 'hits' and words[3] == 'misses',
map(string.split, open(fname,'r').readlines()))
for line in lines:
mout[line[-1]] = ( int(line[0]), int(line[2]) )
return mout
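# memoize_output() expects --debug=memoizer summary lines of the form
# (hypothetical counts):
#   1402 hits 37 misses Node.FS.File.stat()
# and maps the trailing name to a (hits, misses) tuple.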
def memoize_cmp(filea, fileb):
ma = memoize_output(filea)
mb = memoize_output(fileb)
print 'All output: %s / %s [delta]'%(filea, fileb)
print '----------HITS---------- ---------MISSES---------'
cfmt='%7d/%-7d [%d]'
ma_o = []
mb_o = []
mab = []
for k in ma.keys():
if k in mb.keys():
if k not in mab:
mab.append(k)
else:
ma_o.append(k)
for k in mb.keys():
if k in ma.keys():
if k not in mab:
mab.append(k)
else:
mb_o.append(k)
mab.sort()
ma_o.sort()
mb_o.sort()
for k in mab:
hits = cfmt%(ma[k][0], mb[k][0], mb[k][0]-ma[k][0])
miss = cfmt%(ma[k][1], mb[k][1], mb[k][1]-ma[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
for k in ma_o:
hits = '%7d/ --'%(ma[k][0])
miss = '%7d/ --'%(ma[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
for k in mb_o:
hits = ' -- /%-7d'%(mb[k][0])
miss = ' -- /%-7d'%(mb[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
print '-'*(24+24+1+20)
if __name__ == "__main__":
if len(sys.argv) != 3:
print """Usage: %s file1 file2
Compares --debug=memoizer output from file1 against file2."""%sys.argv[0]
sys.exit(1)
memoize_cmp(sys.argv[1], sys.argv[2])
sys.exit(0)
| mit |
louietsai/python-for-android | python3-alpha/extra_modules/gdata/apps/emailsettings/client.py | 48 | 23375 | #!/usr/bin/python2.4
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""EmailSettingsClient simplifies Email Settings API calls.
EmailSettingsClient extends gdata.client.GDClient to ease interaction with
the Google Apps Email Settings API. These interactions include the ability
to create labels, filters, aliases, and update web-clip, forwarding, POP,
IMAP, vacation-responder, signature, language, and general settings, and
retrieve labels, send-as, forwarding, pop, imap, vacation and signature
settings.
"""
__author__ = 'Claudio Cherubino <[email protected]>'
import urllib.request, urllib.parse, urllib.error
import gdata.apps.emailsettings.data
import gdata.client
# Email Settings URI template
# The strings in this template are eventually replaced with the API version,
# Google Apps domain name, username, and settingID, respectively.
EMAIL_SETTINGS_URI_TEMPLATE = '/a/feeds/emailsettings/%s/%s/%s/%s'
# The settingID value for the label requests
SETTING_ID_LABEL = 'label'
# The settingID value for the filter requests
SETTING_ID_FILTER = 'filter'
# The settingID value for the send-as requests
SETTING_ID_SENDAS = 'sendas'
# The settingID value for the webclip requests
SETTING_ID_WEBCLIP = 'webclip'
# The settingID value for the forwarding requests
SETTING_ID_FORWARDING = 'forwarding'
# The settingID value for the POP requests
SETTING_ID_POP = 'pop'
# The settingID value for the IMAP requests
SETTING_ID_IMAP = 'imap'
# The settingID value for the vacation responder requests
SETTING_ID_VACATION_RESPONDER = 'vacation'
# The settingID value for the signature requests
SETTING_ID_SIGNATURE = 'signature'
# The settingID value for the language requests
SETTING_ID_LANGUAGE = 'language'
# The settingID value for the general requests
SETTING_ID_GENERAL = 'general'
# The settingID value for the delegation requests
SETTING_ID_DELEGATION = 'delegation'
# The KEEP action for the email settings
ACTION_KEEP = 'KEEP'
# The ARCHIVE action for the email settings
ACTION_ARCHIVE = 'ARCHIVE'
# The DELETE action for the email settings
ACTION_DELETE = 'DELETE'
# The ALL_MAIL setting for POP enable_for property
POP_ENABLE_FOR_ALL_MAIL = 'ALL_MAIL'
# The MAIL_FROM_NOW_ON setting for POP enable_for property
POP_ENABLE_FOR_MAIL_FROM_NOW_ON = 'MAIL_FROM_NOW_ON'
class EmailSettingsClient(gdata.client.GDClient):
"""Client extension for the Google Email Settings API service.
Attributes:
host: string The hostname for the Email Settings API service.
api_version: string The version of the Email Settings API.
"""
host = 'apps-apis.google.com'
api_version = '2.0'
auth_service = 'apps'
auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
ssl = True
def __init__(self, domain, auth_token=None, **kwargs):
"""Constructs a new client for the Email Settings API.
Args:
domain: string The Google Apps domain with Email Settings.
auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
OAuthToken which authorizes this client to edit the email settings.
kwargs: The other parameters to pass to the gdata.client.GDClient
constructor.
"""
gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
self.domain = domain
def make_email_settings_uri(self, username, setting_id):
"""Creates the URI for the Email Settings API call.
Using this client's Google Apps domain, create the URI to setup
email settings for the given user in that domain. If params are provided,
append them as GET params.
Args:
username: string The name of the user affected by this setting.
setting_id: string The key of the setting to be configured.
Returns:
A string giving the URI for Email Settings API calls for this client's
Google Apps domain.
"""
if '@' in username:
username, domain = username.split('@', 1)
else:
domain = self.domain
uri = EMAIL_SETTINGS_URI_TEMPLATE % (self.api_version, domain,
username, setting_id)
return uri
MakeEmailSettingsUri = make_email_settings_uri
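  # For example (hypothetical values), make_email_settings_uri('liz', 'pop') on
  # the domain 'example.com' yields:
  #   /a/feeds/emailsettings/2.0/example.com/liz/pop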
def create_label(self, username, name, **kwargs):
"""Creates a label with the given properties.
Args:
username: string The name of the user.
name: string The name of the label.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsLabel of the new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
new_label = gdata.apps.emailsettings.data.EmailSettingsLabel(
uri=uri, name=name)
return self.post(new_label, uri, **kwargs)
CreateLabel = create_label
def retrieve_labels(self, username, **kwargs):
"""Retrieves email labels for the specified username
Args:
username: string The name of the user to get the labels for
Returns:
A gdata.data.GDFeed of the user's email labels
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsLabelFeed,
**kwargs)
RetrieveLabels = retrieve_labels
def delete_label(self, username, label, **kwargs):
"""Delete a label from the specified account.
Args:
username: string Name of the user
label: string Name of the label to be deleted
Returns:
An atom.http_core.HttpResponse() with the result of the request
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
uri = '/'.join([uri, urllib.parse.quote_plus(label)])
return self.delete(uri, **kwargs)
DeleteLabel = delete_label
def create_filter(self, username, from_address=None,
to_address=None, subject=None, has_the_word=None,
does_not_have_the_word=None, has_attachments=None,
label=None, mark_as_read=None, archive=None, **kwargs):
"""Creates a filter with the given properties.
Args:
username: string The name of the user.
from_address: string The source email address for the filter.
to_address: string (optional) The destination email address for
the filter.
subject: string (optional) The value the email must have in its
subject to be filtered.
has_the_word: string (optional) The value the email must have
in its subject or body to be filtered.
does_not_have_the_word: string (optional) The value the email
cannot have in its subject or body to be filtered.
has_attachments: string (optional) A boolean string representing
whether the email must have an attachment to be filtered.
label: string (optional) The name of the label to apply to
messages matching the filter criteria.
mark_as_read: Boolean (optional) Whether or not to mark
messages matching the filter criteria as read.
archive: Boolean (optional) Whether or not to move messages
matching to Archived state.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsFilter of the new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FILTER)
new_filter = gdata.apps.emailsettings.data.EmailSettingsFilter(
uri=uri, from_address=from_address,
to_address=to_address, subject=subject,
has_the_word=has_the_word,
does_not_have_the_word=does_not_have_the_word,
has_attachments=has_attachments, label=label,
mark_as_read=mark_as_read, archive=archive)
return self.post(new_filter, uri, **kwargs)
CreateFilter = create_filter
def create_send_as(self, username, name, address, reply_to=None,
make_default=None, **kwargs):
"""Creates a send-as alias with the given properties.
Args:
username: string The name of the user.
name: string The name that will appear in the "From" field.
address: string The email address that appears as the
origination address for emails sent by this user.
reply_to: string (optional) The address to be used as the reply-to
address in email sent using the alias.
make_default: Boolean (optional) Whether or not this alias should
become the default alias for this user.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsSendAsAlias of the
new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SENDAS)
new_alias = gdata.apps.emailsettings.data.EmailSettingsSendAsAlias(
uri=uri, name=name, address=address,
reply_to=reply_to, make_default=make_default)
return self.post(new_alias, uri, **kwargs)
CreateSendAs = create_send_as
def retrieve_send_as(self, username, **kwargs):
"""Retrieves send-as aliases for the specified username
Args:
username: string The name of the user to get the send-as for
Returns:
A gdata.data.GDFeed of the user's send-as alias settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SENDAS)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsSendAsAliasFeed,
**kwargs)
RetrieveSendAs = retrieve_send_as
def update_webclip(self, username, enable, **kwargs):
"""Enable/Disable Google Mail web clip.
Args:
username: string The name of the user.
enable: Boolean Whether to enable showing Web clips.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsWebClip of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_WEBCLIP)
new_webclip = gdata.apps.emailsettings.data.EmailSettingsWebClip(
uri=uri, enable=enable)
return self.update(new_webclip, **kwargs)
UpdateWebclip = update_webclip
def update_forwarding(self, username, enable, forward_to=None,
action=None, **kwargs):
"""Update Google Mail Forwarding settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable incoming email forwarding.
      forward_to: string (optional) The address email will be forwarded to.
action: string (optional) The action to perform after forwarding
an email (ACTION_KEEP, ACTION_ARCHIVE, ACTION_DELETE).
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsForwarding of the
updated resource
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FORWARDING)
new_forwarding = gdata.apps.emailsettings.data.EmailSettingsForwarding(
uri=uri, enable=enable, forward_to=forward_to, action=action)
return self.update(new_forwarding, **kwargs)
UpdateForwarding = update_forwarding
def retrieve_forwarding(self, username, **kwargs):
"""Retrieves forwarding settings for the specified username
Args:
username: string The name of the user to get the forwarding settings for
Returns:
A gdata.data.GDEntry of the user's email forwarding settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FORWARDING)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsForwarding,
**kwargs)
RetrieveForwarding = retrieve_forwarding
def update_pop(self, username, enable, enable_for=None, action=None,
**kwargs):
"""Update Google Mail POP settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable incoming POP3 access.
enable_for: string (optional) Whether to enable POP3 for all mail
(POP_ENABLE_FOR_ALL_MAIL), or mail from now on
(POP_ENABLE_FOR_MAIL_FROM_NOW_ON).
action: string (optional) What Google Mail should do with its copy
of the email after it is retrieved using POP (ACTION_KEEP,
ACTION_ARCHIVE, ACTION_DELETE).
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsPop of the updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_POP)
new_pop = gdata.apps.emailsettings.data.EmailSettingsPop(
uri=uri, enable=enable,
enable_for=enable_for, action=action)
return self.update(new_pop, **kwargs)
UpdatePop = update_pop
def retrieve_pop(self, username, **kwargs):
"""Retrieves POP settings for the specified username
Args:
username: string The name of the user to get the POP settings for
Returns:
A gdata.data.GDEntry of the user's POP settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_POP)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsPop,
**kwargs)
RetrievePop = retrieve_pop
def update_imap(self, username, enable, **kwargs):
"""Update Google Mail IMAP settings.
Args:
username: string The name of the user.
      enable: Boolean Whether to enable IMAP access.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsImap of the updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_IMAP)
new_imap = gdata.apps.emailsettings.data.EmailSettingsImap(
uri=uri, enable=enable)
return self.update(new_imap, **kwargs)
UpdateImap = update_imap
def retrieve_imap(self, username, **kwargs):
"""Retrieves imap settings for the specified username
Args:
username: string The name of the user to get the imap settings for
Returns:
A gdata.data.GDEntry of the user's IMAP settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_IMAP)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsImap,
**kwargs)
RetrieveImap = retrieve_imap
def update_vacation(self, username, enable, subject=None, message=None,
start_date=None, end_date=None, contacts_only=None,
domain_only=None, **kwargs):
"""Update Google Mail vacation-responder settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable the vacation responder.
subject: string (optional) The subject line of the vacation responder
autoresponse.
message: string (optional) The message body of the vacation responder
autoresponse.
      start_date: string (optional) The start date of the vacation responder
        autoresponse.
      end_date: string (optional) The end date of the vacation responder
        autoresponse.
contacts_only: Boolean (optional) Whether to only send autoresponses
to known contacts.
domain_only: Boolean (optional) Whether to only send autoresponses
to users in the primary domain.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsVacationResponder of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_VACATION_RESPONDER)
new_vacation = gdata.apps.emailsettings.data.EmailSettingsVacationResponder(
uri=uri, enable=enable, subject=subject,
message=message, start_date=start_date, end_date=end_date,
contacts_only=contacts_only, domain_only=domain_only)
return self.update(new_vacation, **kwargs)
UpdateVacation = update_vacation
def retrieve_vacation(self, username, **kwargs):
"""Retrieves vacation settings for the specified username
Args:
username: string The name of the user to get the vacation settings for
Returns:
A gdata.data.GDEntry of the user's vacation auto-responder settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_VACATION_RESPONDER)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=
gdata.apps.emailsettings.data.EmailSettingsVacationResponder,
**kwargs)
RetrieveVacation = retrieve_vacation
def update_signature(self, username, signature, **kwargs):
"""Update Google Mail signature.
Args:
username: string The name of the user.
signature: string The signature to be appended to outgoing messages.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsSignature of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SIGNATURE)
new_signature = gdata.apps.emailsettings.data.EmailSettingsSignature(
uri=uri, signature=signature)
return self.update(new_signature, **kwargs)
UpdateSignature = update_signature
def retrieve_signature(self, username, **kwargs):
"""Retrieves signature settings for the specified username
Args:
username: string The name of the user to get the signature settings for
Returns:
A gdata.data.GDEntry of the user's signature settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SIGNATURE)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsSignature,
**kwargs)
RetrieveSignature = retrieve_signature
def update_language(self, username, language, **kwargs):
"""Update Google Mail language settings.
Args:
username: string The name of the user.
language: string The language tag for Google Mail's display language.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsLanguage of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LANGUAGE)
new_language = gdata.apps.emailsettings.data.EmailSettingsLanguage(
uri=uri, language=language)
return self.update(new_language, **kwargs)
UpdateLanguage = update_language
def update_general_settings(self, username, page_size=None, shortcuts=None,
arrows=None, snippets=None, use_unicode=None,
**kwargs):
"""Update Google Mail general settings.
Args:
username: string The name of the user.
page_size: int (optional) The number of conversations to be shown per
page.
shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
arrows: Boolean (optional) Whether to display arrow-shaped personal
indicators next to email sent specifically to the user.
snippets: Boolean (optional) Whether to display snippets of the messages
in the inbox and when searching.
use_unicode: Boolean (optional) Whether to use UTF-8 (unicode) encoding
for all outgoing messages.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsGeneral of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_GENERAL)
new_general = gdata.apps.emailsettings.data.EmailSettingsGeneral(
uri=uri, page_size=page_size, shortcuts=shortcuts,
arrows=arrows, snippets=snippets, use_unicode=use_unicode)
return self.update(new_general, **kwargs)
UpdateGeneralSettings = update_general_settings
def add_email_delegate(self, username, address, **kwargs):
"""Add an email delegate to the mail account
Args:
username: string The name of the user
address: string The email address of the delegated account
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
new_delegation = gdata.apps.emailsettings.data.EmailSettingsDelegation(
uri=uri, address=address)
return self.post(new_delegation, uri, **kwargs)
AddEmailDelegate = add_email_delegate
def retrieve_email_delegates(self, username, **kwargs):
"""Retrieve a feed of the email delegates for the specified username
Args:
username: string The name of the user to get the email delegates for
Returns:
A gdata.data.GDFeed of the user's email delegates
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsDelegationFeed,
**kwargs)
RetrieveEmailDelegates = retrieve_email_delegates
def delete_email_delegate(self, username, address, **kwargs):
"""Delete an email delegate from the specified account
Args:
username: string The name of the user
address: string The email address of the delegated account
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
uri = uri + '/' + address
return self.delete(uri, **kwargs)
DeleteEmailDelegate = delete_email_delegate
| apache-2.0 |
diegoguimaraes/django | django/core/mail/backends/filebased.py | 35 | 2765 | """Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
from django.utils import six
class EmailBackend(ConsoleEmailBackend):
def __init__(self, *args, **kwargs):
self._fname = None
if 'file_path' in kwargs:
self.file_path = kwargs.pop('file_path')
else:
self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
# Make sure self.file_path is a string.
if not isinstance(self.file_path, six.string_types):
raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it exists.
if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
raise ImproperlyConfigured(
'Path for saving email messages exists, but is not a directory: %s' % self.file_path
)
        # Try to create it if it does not exist.
elif not os.path.exists(self.file_path):
try:
os.makedirs(self.file_path)
except OSError as err:
raise ImproperlyConfigured(
'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
)
# Make sure that self.file_path is writable.
if not os.access(self.file_path, os.W_OK):
raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
# Finally, call super().
# Since we're using the console-based backend as a base,
# force the stream to be None, so we don't default to stdout
kwargs['stream'] = None
super(EmailBackend, self).__init__(*args, **kwargs)
def write_message(self, message):
self.stream.write(message.message().as_bytes() + b'\n')
self.stream.write(b'-' * 79)
self.stream.write(b'\n')
def _get_filename(self):
"""Return a unique file name."""
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname
def open(self):
if self.stream is None:
self.stream = open(self._get_filename(), 'ab')
return True
return False
def close(self):
try:
if self.stream is not None:
self.stream.close()
finally:
self.stream = None
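# Typical configuration (documented Django settings; the path is hypothetical):
#   EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#   EMAIL_FILE_PATH = '/tmp/app-messages'  # one timestamped .log file per backend instance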
| bsd-3-clause |
repotvsupertuga/tvsupertuga.repository | script.module.universalscrapers/lib/universalscrapers/common.py | 2 | 16257 | import HTMLParser
import json
import random
import re
import urllib2
import urlparse
import requests,os,time
import xbmc,xbmcaddon
USERDATA_PATH = xbmc.translatePath('special://home/userdata/addon_data')
ADDON_DATA = os.path.join(USERDATA_PATH,'script.module.universalscrapers')
full_file = ADDON_DATA + '/Log.txt'
def clean_title(title):
if title == None: return
title = str(title)
    title = re.sub('&#(\d+);', '', title)
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('"', '\"').replace('&', '&')
title = re.sub('\n|([[].+?[]])|([(].+?[)])|\s(vs|v[.])\s|(:|;|-|"|,|\'|\_|\.|\?)|\s', '', title)
return title.lower()
def clean_search(title):
if title == None: return
title = title.lower()
title = re.sub('&#(\d+);', '', title)
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('"', '\"').replace('&', '&')
title = re.sub('\\\|/|\(|\)|\[|\]|\{|\}|-|:|;|\*|\?|"|\'|<|>|\_|\.|\?', ' ', title).lower()
title = ' '.join(title.split())
return title
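# Example (hypothetical title):
#   clean_search("Spider-Man: Far From Home")  -> "spider man far from home"
#   clean_title("Spider-Man: Far From Home")   -> "spidermanfarfromhome"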
def send_log(name,Time,count,title,year,season = '', episode = ''):
if not os.path.exists(full_file):
full_write = open(full_file,"w")
elif os.path.exists(full_file):
full_write = open(full_file,'a')
if count ==0:
count = 'Check Scraper/NoLinks'
if episode != '':
title = title + '('+year+') : S'+season+' E'+episode
else:
title = title + '('+year+')'
Print = '<######################################################\n# universalscraper: %s' %(str(name))+'\n# Tested with: '+str(title)+'\n# Links returned: %s' %(str(count))+'\n# Time to Complete: %s' %(str(round(Time,2)))+'\n#######################################################>'
    full_write.write(Print+'\n')
    full_write.close()
'''
print '<######################################################'
print '# Tested with: %s' %(str(title))
print '# universalscraper: %s' %(str(name))
print '# Links returned: %s' %(str(count))
print '# Time to Complete: %s' %(str(round(Time,2)))
print '#######################################################>'
return
'''
def Del_LOG():
ADDON_DATA = os.path.join(USERDATA_PATH,'script.module.universalscrapers')
full_file = ADDON_DATA + '/Log.txt'
if os.path.exists(full_file):
os.remove(full_file)
def error_log(name,Txt):
if not os.path.exists(full_file):
full_write = open(full_file,"w")
elif os.path.exists(full_file):
full_write = open(full_file,'a')
Print = ':>>>> Scraper: %s' %(str(name))+'\n:>>>> LogNotice: %s' %(str(Txt))
    full_write.write(Print+'\n')
    full_write.close()
'''
print ':>>>> Scraper: %s' %(str(name))
print ':>>>> LogNotice: %s' %(str(Txt))
return
'''
def random_agent():
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111',
'39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124',
'44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80'],
['11.0']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1',
'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko']
index = random.randrange(len(RAND_UAS))
return RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES),
br_ver=random.choice(BR_VERS[index]))
def replaceHTMLCodes(txt):
txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
txt = HTMLParser.HTMLParser().unescape(txt)
txt = txt.replace(""", "\"")
txt = txt.replace("&", "&")
return txt
def vk(url):
try:
try:
oid, id = urlparse.parse_qs(urlparse.urlparse(url).query)['oid'][0], \
urlparse.parse_qs(urlparse.urlparse(url).query)['id'][0]
except:
oid, id = re.compile('\/video(.*)_(.*)').findall(url)[0]
try:
hash = urlparse.parse_qs(urlparse.urlparse(url).query)['hash'][0]
except:
hash = vk_hash(oid, id)
u = 'http://api.vk.com/method/video.getEmbed?oid=%s&video_id=%s&embed_hash=%s' % (oid, id, hash)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(u, headers=headers)
result = urllib2.urlopen(request).read()
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
try:
result = json.loads(result)['response']
except:
result = vk_private(oid, id)
url = []
try:
url += [{'quality': '720', 'url': result['url720']}]
except:
pass
try:
url += [{'quality': '540', 'url': result['url540']}]
except:
pass
try:
url += [{'quality': '480', 'url': result['url480']}]
except:
pass
if not url == []: return url
try:
url += [{'quality': '360', 'url': result['url360']}]
except:
pass
if not url == []: return url
try:
url += [{'quality': '240', 'url': result['url240']}]
except:
pass
if not url == []: return url
except:
return
def vk_hash(oid, id):
try:
url = 'http://vk.com/al_video.php?act=show_inline&al=1&video=%s_%s' % (oid, id)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = result.replace('\'', '"').replace(' ', '')
hash = re.compile('"hash2":"(.+?)"').findall(result)
hash += re.compile('"hash":"(.+?)"').findall(result)
hash = hash[0]
return hash
except:
return
def vk_private(oid, id):
try:
url = 'http://vk.com/al_video.php?act=show_inline&al=1&video=%s_%s' % (oid, id)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = re.compile('var vars *= *({.+?});').findall(result)[0]
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
result = json.loads(result)
return result
except:
return
def odnoklassniki(url):
try:
url = re.compile('//.+?/.+?/([\w]+)').findall(url)[0]
url = 'http://ok.ru/dk?cmd=videoPlayerMetadata&mid=%s' % url
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
result = json.loads(result)['videos']
try:
hd = [{'quality': '1080', 'url': i['url']} for i in result if i['name'] == 'full']
except:
pass
try:
hd += [{'quality': 'HD', 'url': i['url']} for i in result if i['name'] == 'hd']
except:
pass
try:
sd = [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'sd']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'low']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'lowest']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'mobile']
except:
pass
url = hd + sd[:1]
if not url == []: return url
except:
return
def googletag(url):
quality = re.compile('itag=(\d*)').findall(url)
quality += re.compile('=m(\d*)$').findall(url)
try:
quality = quality[0]
except:
return []
if quality in ['37', '137', '299', '96', '248', '303', '46']:
return [{'quality': '1080', 'url': url}]
elif quality in ['22', '84', '136', '298', '120', '95', '247', '302', '45', '102']:
return [{'quality': '720', 'url': url}]
elif quality in ['35', '44', '135', '244', '94']:
return [{'quality': '480', 'url': url}]
elif quality in ['18', '34', '43', '82', '100', '101', '134', '243', '93']:
return [{'quality': '480', 'url': url}]
elif quality in ['5', '6', '36', '83', '133', '242', '92', '132']:
return [{'quality': '480', 'url': url}]
else:
return []
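# e.g. googletag('https://...googlevideo.com/videoplayback?itag=22&...')
# returns [{'quality': '720', 'url': <same url>}]; itag 22 is YouTube's 720p MP4.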
def filter_host(host):
if host not in ['example.com', 'allvid.ch', 'anime-portal.org', 'anyfiles.pl',
'www.apnasave.club', 'castamp.com', 'clicknupload.com', 'clicknupload.me',
'clicknupload.link', 'cloud.mail.ru', 'cloudy.ec', 'cloudy.eu', 'cloudy.sx',
'cloudy.ch', 'cloudy.com', 'daclips.in', 'daclips.com', 'dailymotion.com',
'ecostream.tv', 'exashare.com', 'uame8aij4f.com', 'yahmaib3ai.com',
'facebook.com', 'filepup.net', 'fileweed.net', 'flashx.tv', 'googlevideo.com',
'googleusercontent.com', 'get.google.com', 'plus.google.com', 'googledrive.com',
'drive.google.com', 'docs.google.com', 'gorillavid.in', 'gorillavid.com',
'grifthost.com', 'hugefiles.net', 'indavideo.hu', 'kingfiles.net', 'mail.ru',
'my.mail.ru', 'm.my.mail.ru', 'videoapi.my.mail.ru', 'api.video.mail.ru',
'mersalaayitten.com', 'mersalaayitten.co', 'mersalaayitten.us', 'movdivx.com',
'divxme.com', 'movpod.net', 'movpod.in', 'movshare.net', 'wholecloud.net',
'vidgg.to', 'mp4stream.com', 'myvi.ru', 'nosvideo.com', 'noslocker.com',
'novamov.com', 'auroravid.to', 'ok.ru', 'odnoklassniki.ru', 'openload.io',
'openload.co', 'oload.tv', 'playwire.com', 'promptfile.com', 'rapidvideo.com',
'raptu.com', 'rutube.ru', 'videos.sapo.pt', 'speedvideo.net', 'streamcloud.eu',
'streamin.to', 'stream.moe', 'streamplay.to', 'teramixer.com', 'thevid.net',
'thevideo.me', 'toltsd-fel.tk', 'toltsd-fel.xyz', 'trollvid.net', 'trollvid.io',
'mp4edge.com', 'tudou.com', 'tune.pk', 'upload.af', 'uploadx.org', 'uploadz.co',
'uptobox.com', 'uptostream.com', 'veoh.com', 'videa.hu', 'videoget.me',
'videohut.to', 'videoraj.ec', 'videoraj.eu', 'videoraj.sx', 'videoraj.ch',
'videoraj.com', 'videoraj.to', 'videoraj.co', 'bitvid.sx', 'videoweed.es',
'videoweed.com', 'videowood.tv', 'byzoo.org', 'playpanda.net', 'videozoo.me',
'videowing.me', 'easyvideo.me', 'play44.net', 'playbb.me', 'video44.net',
'vidlox.tv', 'vidmad.net', 'tamildrive.com', 'vid.me', 'vidup.me', 'vimeo.com',
'vivo.sx', 'vk.com', 'vshare.eu', 'watchers.to', 'watchonline.to',
'everplay.watchpass.net', 'weshare.me', 'xvidstage.com', 'yourupload.com',
'yucache.net', 'youtube.com', 'youtu.be', 'youtube-nocookie.com',
'youwatch.org', 'chouhaa.info', 'aliez.me', 'ani-stream.com', 'bestream.tv',
'blazefile.co', 'divxstage.eu', 'divxstage.net', 'divxstage.to', 'cloudtime.to',
'downace.com', 'entervideo.net', 'estream.to', 'fastplay.sx', 'fastplay.cc',
'goodvideohost.com', 'jetload.tv', 'letwatch.us', 'letwatch.to', 'vidshare.us',
'megamp4.net', 'mp4engine.com', 'mp4upload.com', 'myvidstream.net',
'nowvideo.eu', 'nowvideo.ch', 'nowvideo.sx', 'nowvideo.co', 'nowvideo.li',
'nowvideo.fo', 'nowvideo.at', 'nowvideo.ec', 'playedto.me', 'www.playhd.video',
'www.playhd.fo', 'putload.tv', 'shitmovie.com', 'rapidvideo.ws',
'speedplay.xyz', 'speedplay.us', 'speedplay1.site', 'speedplay.pw',
'speedplay1.pw', 'speedplay3.pw', 'speedplayy.site', 'speedvid.net',
'spruto.tv', 'stagevu.com', 'streame.net', 'thevideos.tv', 'tusfiles.net',
'userscloud.com', 'usersfiles.com', 'vidabc.com', 'vidcrazy.net',
'uploadcrazy.net', 'thevideobee.to', 'videocloud.co', 'vidfile.net',
'vidhos.com', 'vidto.me', 'vidtodo.com', 'vidup.org', 'vidzi.tv', 'vodlock.co',
'vshare.io', 'watchvideo.us', 'watchvideo2.us', 'watchvideo3.us',
'watchvideo4.us', 'watchvideo5.us', 'watchvideo6.us', 'watchvideo7.us',
'watchvideo8.us', 'watchvideo9.us', 'watchvideo10.us', 'watchvideo11.us',
'watchvideo12.us', 'zstream.to']:
return False
return True
def check_playable(url):
"""
checks if passed url is a live link
:param str url: stream url
:return: playable stream url or None
:rtype: str or None
"""
    import urllib
    import urlparse
    import requests
try:
headers = url.rsplit('|', 1)[1]
except:
headers = ''
headers = urllib.quote_plus(headers).replace('%3D', '=') if ' ' in headers else headers
headers = dict(urlparse.parse_qsl(headers))
result = None
try:
        # both http cases (m3u8 or not) probe the url with a HEAD request
        if url.startswith('http'):
            result = requests.head(url.split('|')[0], headers=headers, timeout=5)
            if result is None:
                return None
except:
pass
return result
def get_rd_domains():
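    # return the list of hoster domains supported by Real-Debrid, cached in a
    # local sqlite database for roughly a day so the API is not hit on every call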
import xbmc
import xbmcaddon
import os
try:
from sqlite3 import dbapi2 as database
except:
from pysqlite2 import dbapi2 as database
import datetime
cache_location = os.path.join(
xbmc.translatePath(xbmcaddon.Addon("script.module.universalscrapers").getAddonInfo('profile')).decode('utf-8'),
'url_cache.db')
try:
dbcon = database.connect(cache_location)
dbcur = dbcon.cursor()
try:
dbcur.execute("SELECT * FROM version")
match = dbcur.fetchone()
except:
dbcur.execute("CREATE TABLE version (""version TEXT)")
dbcur.execute("INSERT INTO version Values ('0.5.4')")
dbcon.commit()
        dbcur.execute(
            "CREATE TABLE IF NOT EXISTS rd_domains (domains TEXT, added TEXT);")
    except Exception as e:
        # without a usable cache database, fetch the list directly from the API
        url = 'https://api.real-debrid.com/rest/1.0/hosts/domains'
        return requests.get(url).json()
try:
sources = []
dbcur.execute(
"SELECT * FROM rd_domains")
match = dbcur.fetchone()
t1 = int(re.sub('[^0-9]', '', str(match[1])))
t2 = int(datetime.datetime.now().strftime("%Y%m%d%H%M"))
update = abs(t2 - t1) > 60 * 24
if update is False:
sources = json.loads(match[0])
return sources
except Exception as e:
pass
url = 'https://api.real-debrid.com/rest/1.0/hosts/domains'
domains = requests.get(url).json()
try:
dbcur.execute("DELETE FROM rd_domains WHERE added = %s" %(match[1]))
except:
pass
dbcur.execute("INSERT INTO rd_domains Values (?, ?)", (
json.dumps(domains),
datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
dbcon.commit()
return domains
| gpl-2.0 |
Benrflanders/Pytris | pyglet/gl/lib_wgl.py | 41 | 5761 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: lib_glx.py 597 2007-02-03 16:13:07Z Alex.Holkner $'
import ctypes
from ctypes import *
import pyglet
from pyglet.gl.lib import missing_function, decorate_function
from pyglet.compat import asbytes
__all__ = ['link_GL', 'link_GLU', 'link_WGL']
_debug_trace = pyglet.options['debug_trace']
gl_lib = ctypes.windll.opengl32
glu_lib = ctypes.windll.glu32
wgl_lib = gl_lib
if _debug_trace:
from pyglet.lib import _TraceLibrary
gl_lib = _TraceLibrary(gl_lib)
glu_lib = _TraceLibrary(glu_lib)
wgl_lib = _TraceLibrary(wgl_lib)
try:
wglGetProcAddress = wgl_lib.wglGetProcAddress
wglGetProcAddress.restype = CFUNCTYPE(POINTER(c_int))
wglGetProcAddress.argtypes = [c_char_p]
_have_get_proc_address = True
except AttributeError:
_have_get_proc_address = False
class WGLFunctionProxy(object):
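    '''Stand-in for a WGL extension function that cannot be resolved yet:
    wglGetProcAddress only succeeds once a GL context is current, so the
    address lookup is deferred until the proxy is first called.
    '''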
__slots__ = ['name', 'requires', 'suggestions', 'ftype', 'func']
def __init__(self, name, ftype, requires, suggestions):
assert _have_get_proc_address
self.name = name
self.ftype = ftype
self.requires = requires
self.suggestions = suggestions
self.func = None
def __call__(self, *args, **kwargs):
if self.func:
return self.func(*args, **kwargs)
from pyglet.gl import current_context
if not current_context:
raise Exception(
'Call to function "%s" before GL context created' % self.name)
address = wglGetProcAddress(asbytes(self.name))
if cast(address, POINTER(c_int)): # check cast because address is func
self.func = cast(address, self.ftype)
decorate_function(self.func, self.name)
else:
self.func = missing_function(
self.name, self.requires, self.suggestions)
result = self.func(*args, **kwargs)
return result
def link_GL(name, restype, argtypes, requires=None, suggestions=None):
try:
func = getattr(gl_lib, name)
func.restype = restype
func.argtypes = argtypes
decorate_function(func, name)
return func
except AttributeError:
# Not in opengl32.dll. Try and get a pointer from WGL.
try:
fargs = (restype,) + tuple(argtypes)
ftype = ctypes.WINFUNCTYPE(*fargs)
if _have_get_proc_address:
from pyglet.gl import gl_info
if gl_info.have_context():
address = wglGetProcAddress(name)
if address:
func = cast(address, ftype)
decorate_function(func, name)
return func
else:
# Insert proxy until we have a context
return WGLFunctionProxy(name, ftype, requires, suggestions)
except:
pass
return missing_function(name, requires, suggestions)
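# Example (sketch, not part of this module): binding a core GL entry point.
#   glClear = link_GL('glClear', None, [ctypes.c_uint])
#   glClear(0x00004000)  # GL_COLOR_BUFFER_BIT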
def link_GLU(name, restype, argtypes, requires=None, suggestions=None):
try:
func = getattr(glu_lib, name)
func.restype = restype
func.argtypes = argtypes
decorate_function(func, name)
return func
except AttributeError:
# Not in glu32.dll. Try and get a pointer from WGL.
try:
fargs = (restype,) + tuple(argtypes)
ftype = ctypes.WINFUNCTYPE(*fargs)
if _have_get_proc_address:
from pyglet.gl import gl_info
if gl_info.have_context():
address = wglGetProcAddress(name)
if address:
func = cast(address, ftype)
decorate_function(func, name)
return func
else:
# Insert proxy until we have a context
return WGLFunctionProxy(name, ftype, requires, suggestions)
except:
pass
return missing_function(name, requires, suggestions)
link_WGL = link_GL
| mit |
HarmJ0y/sparta | app/settings.py | 1 | 25730 | #!/usr/bin/env python
'''
SPARTA - Network Infrastructure Penetration Testing Tool (http://sparta.secforce.com)
Copyright (c) 2014 SECFORCE (Antonio Quina and Leonidas Stavliotis)
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys, os
from PyQt4 import QtCore, QtGui
from app.auxiliary import * # for timestamp
# this class reads and writes application settings
class AppSettings():
def __init__(self):
# check if settings file exists and creates it if it doesn't
if not os.path.exists('./sparta.conf'):
print '[+] Creating settings file..'
self.createDefaultSettings()
else:
print '[+] Loading settings file..'
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
# This function creates the default settings file. Note that, in general, everything is case sensitive.
# Each action should be in the following format:
#
# (key, [label, command, service])
# key - must be unique within the group and is used to retrieve each action. is used to create the tab titles and also to recognise nmap commands so we can parse the output (case sensitive)
# label - is what appears in the context menu in the gui
# command - command that will be run. These placeholders will be replaced on-the-fly: [IP] [PORT] [OUTPUT]
# service - service(s) to which the tool applies (comma-separated). Leave empty if valid for all services.
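    # For example, a hypothetical entry (not shipped with SPARTA by default):
    #   self.actions.setValue("whois", ["Run whois", "whois [IP]", ""])
    # would add a "Run whois" context-menu action valid for all services.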
def createDefaultSettings(self):
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
self.actions.beginGroup('GeneralSettings')
self.actions.setValue('default-terminal','gnome-terminal')
self.actions.setValue('tool-output-black-background','False')
self.actions.setValue('screenshooter-timeout','15000')
self.actions.setValue('web-services','http,https,ssl,soap,http-proxy,http-alt,https-alt')
self.actions.setValue('enable-scheduler','True')
self.actions.setValue('max-fast-processes', '10')
self.actions.setValue('max-slow-processes', '10')
self.actions.endGroup()
self.actions.beginGroup('BruteSettings')
self.actions.setValue('store-cleartext-passwords-on-exit','True')
self.actions.setValue('username-wordlist-path','/usr/share/wordlists/')
self.actions.setValue('password-wordlist-path','/usr/share/wordlists/')
self.actions.setValue('default-username','root')
self.actions.setValue('default-password','password')
self.actions.setValue('services', "asterisk,afp,cisco,cisco-enable,cvs,firebird,ftp,ftps,http-head,http-get,https-head,https-get,http-get-form,http-post-form,https-get-form,https-post-form,http-proxy,http-proxy-urlenum,icq,imap,imaps,irc,ldap2,ldap2s,ldap3,ldap3s,ldap3-crammd5,ldap3-crammd5s,ldap3-digestmd5,ldap3-digestmd5s,mssql,mysql,ncp,nntp,oracle-listener,oracle-sid,pcanywhere,pcnfs,pop3,pop3s,postgres,rdp,rexec,rlogin,rsh,s7-300,sip,smb,smtp,smtps,smtp-enum,snmp,socks5,ssh,sshkey,svn,teamspeak,telnet,telnets,vmauthd,vnc,xmpp")
self.actions.setValue('no-username-services', "cisco,cisco-enable,oracle-listener,s7-300,snmp,vnc")
self.actions.setValue('no-password-services', "oracle-sid,rsh,smtp-enum")
self.actions.endGroup()
self.actions.beginGroup('StagedNmapSettings')
self.actions.setValue('stage1-ports','T:80,443')
self.actions.setValue('stage2-ports','T:25,135,137,139,445,1433,3306,5432,U:137,161,162,1434')
self.actions.setValue('stage3-ports','T:23,21,22,110,111,2049,3389,8080,U:500,5060')
self.actions.setValue('stage4-ports','T:0-20,24,26-79,81-109,112-134,136,138,140-442,444,446-1432,1434-2048,2050-3305,3307-3388,3390-5431,5433-8079,8081-29999')
self.actions.setValue('stage5-ports','T:30000-65535')
self.actions.endGroup()
self.actions.beginGroup('ToolSettings')
self.actions.setValue('nmap-path','/usr/bin/nmap')
self.actions.setValue('hydra-path','/usr/bin/hydra')
self.actions.setValue('cutycapt-path','/usr/bin/cutycapt')
self.actions.setValue('texteditor-path','/usr/bin/leafpad')
self.actions.endGroup()
self.actions.beginGroup('HostActions')
self.actions.setValue("nmap-fast-tcp", ["Run nmap (fast TCP)", "nmap -Pn -F -T4 -vvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-full-tcp", ["Run nmap (full TCP)", "nmap -Pn -sV -sC -O -p- -T4 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-fast-udp", ["Run nmap (fast UDP)", "nmap -n -Pn -sU -F --min-rate=1000 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-udp-1000", ["Run nmap (top 1000 quick UDP)", "nmap -n -Pn -sU --min-rate=1000 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-full-udp", ["Run nmap (full UDP)", "nmap -n -Pn -sU -p- -T4 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("unicornscan-full-udp", ["Run unicornscan (full UDP)", "unicornscan -mU -Ir 1000 [IP]:a -v"])
self.actions.endGroup()
self.actions.beginGroup('PortActions')
self.actions.setValue("banner", ["Grab banner", "bash -c \"echo \"\" | nc -v -n -w1 [IP] [PORT]\"", ""])
self.actions.setValue("screenshooter", ["Take a webservice screenshot", "", ""])
self.actions.setValue("nmap", ["Run nmap (scripts) on port", "nmap -Pn -sV -sC -vvvvv -p[PORT] [IP] -oA [OUTPUT]", ""])
self.actions.setValue("nikto", ["Run nikto", "nikto -o [OUTPUT].txt -p [PORT] -h [IP]", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("dirbuster", ["Launch dirbuster", "java -Xmx256M -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://[IP]:[PORT]/", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("webslayer", ["Launch webslayer", "webslayer", "http,https,ssl,soap,http-proxy,http-alt"])
### SMB
self.actions.setValue("samrdump", ["Run samrdump", "python /usr/share/doc/python-impacket-doc/examples/samrdump.py [IP] [PORT]/SMB", "netbios-ssn,microsoft-ds"])
self.actions.setValue("nbtscan", ["Run nbtscan", "nbtscan -v -h [IP]", "netbios-ns"])
self.actions.setValue("smbenum", ["Run smbenum", "bash ./scripts/smbenum.sh [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("enum4linux", ["Run enum4linux", "enum4linux [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("polenum", ["Extract password policy (polenum)", "polenum [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-users", ["Enumerate users (nmap)", "nmap -p[PORT] --script=smb-enum-users [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-users-rpc", ["Enumerate users (rpcclient)", "bash -c \"echo 'enumdomusers' | rpcclient [IP] -U%\"", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-admins", ["Enumerate domain admins (net)", "net rpc group members \"Domain Admins\" -I [IP] -U% ", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-groups", ["Enumerate groups (nmap)", "nmap -p[PORT] --script=smb-enum-groups [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-shares", ["Enumerate shares (nmap)", "nmap -p[PORT] --script=smb-enum-shares [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-sessions", ["Enumerate logged in users (nmap)", "nmap -p[PORT] --script=smb-enum-sessions [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-policies", ["Extract password policy (nmap)", "nmap -p[PORT] --script=smb-enum-domains [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-null-sessions", ["Check for null sessions (rpcclient)", "bash -c \"echo 'srvinfo' | rpcclient [IP] -U%\"", "netbios-ssn,microsoft-ds"])
###
self.actions.setValue("ldapsearch", ["Run ldapsearch", "ldapsearch -h [IP] -p [PORT] -x -s base", "ldap"])
self.actions.setValue("snmpcheck", ["Run snmpcheck", "snmpcheck -t [IP]", "snmp,snmptrap"])
self.actions.setValue("rpcinfo", ["Run rpcinfo", "rpcinfo -p [IP]", "rpcbind"])
self.actions.setValue("showmount", ["Show nfs shares", "showmount -e [IP]", "nfs"])
self.actions.setValue("x11screen", ["Run x11screenshot", "bash ./scripts/x11screenshot.sh [IP]", "X11"])
self.actions.setValue("sslscan", ["Run sslscan", "sslscan --no-failed [IP]:[PORT]", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("rwho", ["Run rwho", "rwho -a [IP]", "who"])
self.actions.setValue("finger", ["Enumerate users (finger)", "./scripts/fingertool.sh [IP]", "finger"])
self.actions.setValue("smtp-enum-vrfy", ["Enumerate SMTP users (VRFY)", "smtp-user-enum -M VRFY -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("smtp-enum-expn", ["Enumerate SMTP users (EXPN)", "smtp-user-enum -M EXPN -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("smtp-enum-rcpt", ["Enumerate SMTP users (RCPT)", "smtp-user-enum -M RCPT -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("ftp-default", ["Check for default ftp credentials", "hydra -s [PORT] -C ./wordlists/ftp-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] ftp", "ftp"])
self.actions.setValue("mssql-default", ["Check for default mssql credentials", "hydra -s [PORT] -C ./wordlists/mssql-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] mssql", "ms-sql-s"])
self.actions.setValue("mysql-default", ["Check for default mysql credentials", "hydra -s [PORT] -C ./wordlists/mysql-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] mysql", "mysql"])
self.actions.setValue("oracle-default", ["Check for default oracle credentials", "hydra -s [PORT] -C ./wordlists/oracle-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] oracle-listener", "oracle-tns"])
self.actions.setValue("postgres-default", ["Check for default postgres credentials", "hydra -s [PORT] -C ./wordlists/postgres-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] postgres", "postgresql"])
#self.actions.setValue("snmp-default", ["Check for default community strings", "onesixtyone -c /usr/share/doc/onesixtyone/dict.txt [IP]", "snmp,snmptrap"])
#self.actions.setValue("snmp-default", ["Check for default community strings", "python ./scripts/snmpbrute.py.old -t [IP] -p [PORT] -f ./wordlists/snmp-default.txt", "snmp,snmptrap"])
self.actions.setValue("snmp-default", ["Check for default community strings", "python ./scripts/snmpbrute.py -t [IP] -p [PORT] -f ./wordlists/snmp-default.txt -b --no-colours", "snmp,snmptrap"])
self.actions.setValue("snmp-brute", ["Bruteforce community strings (medusa)", "bash -c \"medusa -h [IP] -u root -P ./wordlists/snmp-default.txt -M snmp | grep SUCCESS\"", "snmp,snmptrap"])
self.actions.setValue("oracle-version", ["Get version", "msfcli auxiliary/scanner/oracle/tnslsnr_version rhosts=[IP] E", "oracle-tns"])
self.actions.setValue("oracle-sid", ["Oracle SID enumeration", "msfcli auxiliary/scanner/oracle/sid_enum rhosts=[IP] E", "oracle-tns"])
###
self.actions.endGroup()
self.actions.beginGroup('PortTerminalActions')
self.actions.setValue("netcat", ["Open with netcat", "nc -v [IP] [PORT]", ""])
self.actions.setValue("telnet", ["Open with telnet", "telnet [IP] [PORT]", ""])
self.actions.setValue("ftp", ["Open with ftp client", "ftp [IP] [PORT]", "ftp"])
self.actions.setValue("mysql", ["Open with mysql client (as root)", "mysql -u root -h [IP] --port=[PORT] -p", "mysql"])
self.actions.setValue("mssql", ["Open with mssql client (as sa)", "python /usr/share/doc/python-impacket-doc/examples/mssqlclient.py -p [PORT] sa@[IP]", "mys-sql-s,codasrv-se"])
self.actions.setValue("ssh", ["Open with ssh client (as root)", "ssh root@[IP] -p [PORT]", "ssh"])
self.actions.setValue("psql", ["Open with postgres client (as postgres)", "psql -h [IP] -p [PORT] -U postgres", "postgres"])
self.actions.setValue("rdesktop", ["Open with rdesktop", "rdesktop [IP]:[PORT]", "ms-wbt-server"])
self.actions.setValue("rpcclient", ["Open with rpcclient (NULL session)", "rpcclient [IP] -p [PORT] -U%", "netbios-ssn,microsoft-ds"])
self.actions.setValue("vncviewer", ["Open with vncviewer", "vncviewer [IP]:[PORT]", "vnc"])
self.actions.setValue("xephyr", ["Open with Xephyr", "Xephyr -query [IP] :1", "xdmcp"])
self.actions.setValue("rlogin", ["Open with rlogin", "rlogin -i root -p [PORT] [IP]", "login"])
self.actions.setValue("rsh", ["Open with rsh", "rsh -l root [IP]", "shell"])
self.actions.endGroup()
self.actions.beginGroup('SchedulerSettings')
self.actions.setValue("nikto",["http,https,ssl,soap,http-proxy,http-alt","tcp"])
self.actions.setValue("screenshooter",["http,https,ssl,http-proxy,http-alt","tcp"])
self.actions.setValue("smbenum",["microsoft-ds","tcp"])
# self.actions.setValue("enum4linux","netbios-ssn,microsoft-ds")
# self.actions.setValue("smb-null-sessions","netbios-ssn,microsoft-ds")
# self.actions.setValue("nbtscan","netbios-ns")
self.actions.setValue("snmpcheck",["snmp","udp"])
self.actions.setValue("x11screen",["X11","tcp"])
self.actions.setValue("snmp-default",["snmp","udp"])
self.actions.setValue("smtp-enum-vrfy",["smtp","tcp"])
self.actions.setValue("mysql-default",["mysql","tcp"])
self.actions.setValue("mssql-default",["ms-sql-s","tcp"])
self.actions.setValue("ftp-default",["ftp","tcp"])
self.actions.setValue("postgres-default",["postgresql","tcp"])
self.actions.setValue("oracle-default",["oracle-tns","tcp"])
self.actions.endGroup()
self.actions.sync()
# NOTE: the weird order of elements in the functions below is due to historical reasons. Change this some day.
def getGeneralSettings(self):
settings = dict()
self.actions.beginGroup('GeneralSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getBruteSettings(self):
settings = dict()
self.actions.beginGroup('BruteSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getStagedNmapSettings(self):
settings = dict()
self.actions.beginGroup('StagedNmapSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getToolSettings(self):
settings = dict()
self.actions.beginGroup('ToolSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
# this function fetches all the host actions from the settings file
def getHostActions(self):
hostactions = []
sortArray = []
self.actions.beginGroup('HostActions')
keys = self.actions.childKeys()
for k in keys:
hostactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, hostactions) # sort by label so that it appears nicely in the context menu
return hostactions
# this function fetches all the port actions from the settings file
def getPortActions(self):
portactions = []
sortArray = []
self.actions.beginGroup('PortActions')
keys = self.actions.childKeys()
for k in keys:
portactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString(), self.actions.value(k).toList()[2].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, portactions) # sort by label so that it appears nicely in the context menu
return portactions
# this function fetches all the port actions that will be run as terminal commands from the settings file
def getPortTerminalActions(self):
portactions = []
sortArray = []
self.actions.beginGroup('PortTerminalActions')
keys = self.actions.childKeys()
for k in keys:
portactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString(), self.actions.value(k).toList()[2].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, portactions) # sort by label so that it appears nicely in the context menu
return portactions
def getSchedulerSettings(self):
settings = []
self.actions.beginGroup('SchedulerSettings')
keys = self.actions.childKeys()
for k in keys:
settings.append([str(k),self.actions.value(k).toList()[0].toString(),self.actions.value(k).toList()[1].toString()])
self.actions.endGroup()
return settings
def getSchedulerSettings_old(self):
settings = dict()
self.actions.beginGroup('SchedulerSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def backupAndSave(self, newSettings):
# Backup and save
print '[+] Backing up old settings and saving new settings..'
os.rename('./sparta.conf', './'+getTimestamp()+'-sparta.conf')
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
self.actions.beginGroup('GeneralSettings')
self.actions.setValue('default-terminal',newSettings.general_default_terminal)
self.actions.setValue('tool-output-black-background',newSettings.general_tool_output_black_background)
self.actions.setValue('screenshooter-timeout',newSettings.general_screenshooter_timeout)
self.actions.setValue('web-services',newSettings.general_web_services)
self.actions.setValue('enable-scheduler',newSettings.general_enable_scheduler)
self.actions.setValue('max-fast-processes', newSettings.general_max_fast_processes)
self.actions.setValue('max-slow-processes', newSettings.general_max_slow_processes)
self.actions.endGroup()
self.actions.beginGroup('BruteSettings')
self.actions.setValue('store-cleartext-passwords-on-exit',newSettings.brute_store_cleartext_passwords_on_exit)
self.actions.setValue('username-wordlist-path',newSettings.brute_username_wordlist_path)
self.actions.setValue('password-wordlist-path',newSettings.brute_password_wordlist_path)
self.actions.setValue('default-username',newSettings.brute_default_username)
self.actions.setValue('default-password',newSettings.brute_default_password)
self.actions.setValue('services', newSettings.brute_services)
self.actions.setValue('no-username-services', newSettings.brute_no_username_services)
self.actions.setValue('no-password-services', newSettings.brute_no_password_services)
self.actions.endGroup()
self.actions.beginGroup('StagedNmapSettings')
self.actions.setValue('stage1-ports',newSettings.tools_nmap_stage1_ports)
self.actions.setValue('stage2-ports',newSettings.tools_nmap_stage2_ports)
self.actions.setValue('stage3-ports',newSettings.tools_nmap_stage3_ports)
self.actions.setValue('stage4-ports',newSettings.tools_nmap_stage4_ports)
self.actions.setValue('stage5-ports',newSettings.tools_nmap_stage5_ports)
self.actions.endGroup()
self.actions.beginGroup('HostActions')
for a in newSettings.hostActions:
self.actions.setValue(a[1], [a[0], a[2]])
self.actions.endGroup()
self.actions.beginGroup('PortActions')
for a in newSettings.portActions:
self.actions.setValue(a[1], [a[0], a[2], a[3]])
self.actions.endGroup()
self.actions.beginGroup('PortTerminalActions')
for a in newSettings.portTerminalActions:
self.actions.setValue(a[1], [a[0], a[2], a[3]])
self.actions.endGroup()
self.actions.beginGroup('SchedulerSettings')
for tool in newSettings.automatedAttacks:
self.actions.setValue(tool, newSettings.automatedAttacks[tool])
self.actions.endGroup()
self.actions.sync()
# This class first sets all the default settings and then overwrites them with the settings found in the configuration file
class Settings():
def __init__(self, appSettings=None):
# general
self.general_default_terminal = "gnome-terminal"
self.general_tool_output_black_background = "False"
self.general_screenshooter_timeout = "15000"
self.general_web_services = "http,https,ssl,soap,http-proxy,http-alt,https-alt"
self.general_enable_scheduler = "True"
self.general_max_fast_processes = "10"
self.general_max_slow_processes = "10"
# brute
self.brute_store_cleartext_passwords_on_exit = "True"
self.brute_username_wordlist_path = "/usr/share/wordlists/"
self.brute_password_wordlist_path = "/usr/share/wordlists/"
self.brute_default_username = "root"
self.brute_default_password = "password"
self.brute_services = "asterisk,afp,cisco,cisco-enable,cvs,firebird,ftp,ftps,http-head,http-get,https-head,https-get,http-get-form,http-post-form,https-get-form,https-post-form,http-proxy,http-proxy-urlenum,icq,imap,imaps,irc,ldap2,ldap2s,ldap3,ldap3s,ldap3-crammd5,ldap3-crammd5s,ldap3-digestmd5,ldap3-digestmd5s,mssql,mysql,ncp,nntp,oracle-listener,oracle-sid,pcanywhere,pcnfs,pop3,pop3s,postgres,rdp,rexec,rlogin,rsh,s7-300,sip,smb,smtp,smtps,smtp-enum,snmp,socks5,ssh,sshkey,svn,teamspeak,telnet,telnets,vmauthd,vnc,xmpp"
self.brute_no_username_services = "cisco,cisco-enable,oracle-listener,s7-300,snmp,vnc"
self.brute_no_password_services = "oracle-sid,rsh,smtp-enum"
# tools
self.tools_nmap_stage1_ports = "T:80,443"
self.tools_nmap_stage2_ports = "T:25,135,137,139,445,1433,3306,5432,U:137,161,162,1434"
self.tools_nmap_stage3_ports = "T:23,21,22,110,111,2049,3389,8080,U:500,5060"
self.tools_nmap_stage4_ports = "T:0-20,24,26-79,81-109,112-134,136,138,140-442,444,446-1432,1434-2048,2050-3305,3307-3388,3390-5431,5433-8079,8081-29999"
self.tools_nmap_stage5_ports = "T:30000-65535"
self.tools_path_nmap = "/usr/bin/nmap"
self.tools_path_hydra = "/usr/bin/hydra"
self.tools_path_cutycapt = "/usr/bin/cutycapt"
self.tools_path_texteditor = "/usr/bin/leafpad"
self.hostActions = []
self.portActions = []
self.portTerminalActions = []
self.stagedNmapSettings = []
self.automatedAttacks = []
# now that all defaults are set, overwrite with whatever was in the .conf file (stored in appSettings)
if appSettings:
try:
self.generalSettings = appSettings.getGeneralSettings()
self.bruteSettings = appSettings.getBruteSettings()
self.stagedNmapSettings = appSettings.getStagedNmapSettings()
self.toolSettings = appSettings.getToolSettings()
self.hostActions = appSettings.getHostActions()
self.portActions = appSettings.getPortActions()
self.portTerminalActions = appSettings.getPortTerminalActions()
self.automatedAttacks = appSettings.getSchedulerSettings()
# general
self.general_default_terminal = self.generalSettings['default-terminal']
self.general_tool_output_black_background = self.generalSettings['tool-output-black-background']
self.general_screenshooter_timeout = self.generalSettings['screenshooter-timeout']
self.general_web_services = self.generalSettings['web-services']
self.general_enable_scheduler = self.generalSettings['enable-scheduler']
self.general_max_fast_processes = self.generalSettings['max-fast-processes']
self.general_max_slow_processes = self.generalSettings['max-slow-processes']
# brute
self.brute_store_cleartext_passwords_on_exit = self.bruteSettings['store-cleartext-passwords-on-exit']
self.brute_username_wordlist_path = self.bruteSettings['username-wordlist-path']
self.brute_password_wordlist_path = self.bruteSettings['password-wordlist-path']
self.brute_default_username = self.bruteSettings['default-username']
self.brute_default_password = self.bruteSettings['default-password']
self.brute_services = self.bruteSettings['services']
self.brute_no_username_services = self.bruteSettings['no-username-services']
self.brute_no_password_services = self.bruteSettings['no-password-services']
# tools
self.tools_nmap_stage1_ports = self.stagedNmapSettings['stage1-ports']
self.tools_nmap_stage2_ports = self.stagedNmapSettings['stage2-ports']
self.tools_nmap_stage3_ports = self.stagedNmapSettings['stage3-ports']
self.tools_nmap_stage4_ports = self.stagedNmapSettings['stage4-ports']
self.tools_nmap_stage5_ports = self.stagedNmapSettings['stage5-ports']
self.tools_path_nmap = self.toolSettings['nmap-path']
self.tools_path_hydra = self.toolSettings['hydra-path']
self.tools_path_cutycapt = self.toolSettings['cutycapt-path']
self.tools_path_texteditor = self.toolSettings['texteditor-path']
except KeyError:
print '\t[-] Something went wrong while loading the configuration file. Falling back to default settings for some settings.'
print '\t[-] Go to the settings menu to fix the issues!'
# TODO: send signal to automatically open settings dialog here
def __eq__(self, other): # returns false if settings objects are different
if type(other) is type(self):
return self.__dict__ == other.__dict__
return False
if __name__ == "__main__":
settings = AppSettings()
s = Settings(settings)
s2 = Settings(settings)
print s == s2
s2.general_default_terminal = 'whatever'
print s == s2
| gpl-3.0 |
svn2github/chromium-depot-tools | third_party/gsutil/gslib/bucket_listing_ref.py | 51 | 6349 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import time
class BucketListingRef(object):
"""
Container that holds a reference to one result from a bucket listing, allowing
polymorphic iteration over wildcard-iterated URIs, Keys, or Prefixes. At a
minimum, every reference contains a StorageUri. If the reference came from a
bucket listing (as opposed to a manually instantiated ref that might populate
only the StorageUri), it will additionally contain either a Key or a Prefix,
depending on whether it was a reference to an object or was just a prefix of a
path (i.e., bucket subdirectory). The latter happens when the bucket was
listed using delimiter='/'.
Note that Keys are shallow-populated, based on the contents extracted from
parsing a bucket listing. This includes name, length, and other fields
(basically, the info listed by gsutil ls -l), but does not include information
like ACL and location (which require separate server requests, which is why
there's a separate gsutil ls -L option to get this more detailed info).
"""
def __init__(self, uri, key=None, prefix=None, headers=None):
"""Instantiate BucketListingRef from uri and (if available) key or prefix.
Args:
uri: StorageUri for the object (required).
key: Key for the object, or None if not available.
prefix: Prefix for the subdir, or None if not available.
headers: Dictionary containing optional HTTP headers to pass to boto
(which happens when GetKey() is called on an BucketListingRef which
has no constructor-populated Key), or None if not available.
At most one of key and prefix can be populated.
"""
assert key is None or prefix is None
self.uri = uri
self.key = key
self.prefix = prefix
self.headers = headers or {}
def GetUri(self):
"""Get URI form of listed URI.
Returns:
StorageUri.
"""
return self.uri
def GetUriString(self):
"""Get string URI form of listed URI.
Returns:
String.
"""
return self.uri.uri
def NamesBucket(self):
"""Determines if this BucketListingRef names a bucket.
Returns:
bool indicator.
"""
return self.key is None and self.prefix is None and self.uri.names_bucket()
def IsLatest(self):
"""Determines if this BucketListingRef names the latest version of an
object.
Returns:
bool indicator.
"""
return hasattr(self.uri, 'is_latest') and self.uri.is_latest
def GetRStrippedUriString(self):
"""Get string URI form of listed URI, stripped of any right trailing
delims, and without version string.
Returns:
String.
"""
return self.uri.versionless_uri.rstrip('/')
def HasKey(self):
"""Return bool indicator of whether this BucketListingRef has a Key."""
return bool(self.key)
def HasPrefix(self):
"""Return bool indicator of whether this BucketListingRef has a Prefix."""
return bool(self.prefix)
def GetKey(self):
"""Get Key form of listed URI.
Returns:
Subclass of boto.s3.key.Key.
Raises:
BucketListingRefException: for bucket-only uri.
"""
# For gsutil ls -l gs://bucket self.key will be populated from (boto)
# parsing the bucket listing. But as noted and handled below there are
# cases where self.key isn't populated.
if not self.key:
if not self.uri.names_object():
raise BucketListingRefException(
'Attempt to call GetKey() on Key-less BucketListingRef (uri=%s) ' %
self.uri)
# This case happens when we do gsutil ls -l on a object name-ful
# StorageUri with no object-name wildcard. Since the ls command
# implementation only reads bucket info we need to read the object
# for this case.
self.key = self.uri.get_key(validate=False, headers=self.headers)
# When we retrieve the object this way its last_modified timestamp
# is formatted in RFC 1123 format, which is different from when we
# retrieve from the bucket listing (which uses ISO 8601 format), so
# convert so we consistently return ISO 8601 format.
tuple_time = (time.strptime(self.key.last_modified,
'%a, %d %b %Y %H:%M:%S %Z'))
self.key.last_modified = time.strftime('%Y-%m-%dT%H:%M:%S', tuple_time)
return self.key
def GetPrefix(self):
"""Get Prefix form of listed URI.
Returns:
boto.s3.prefix.Prefix.
Raises:
BucketListingRefException: if this object has no Prefix.
"""
if not self.prefix:
raise BucketListingRefException(
'Attempt to call GetPrefix() on Prefix-less BucketListingRef '
'(uri=%s)' % self.uri)
return self.prefix
def __repr__(self):
"""Returns string representation of BucketListingRef."""
return 'BucketListingRef(%s, HasKey=%s, HasPrefix=%s)' % (
self.uri, self.HasKey(), self.HasPrefix())
class BucketListingRefException(StandardError):
"""Exception thrown for invalid BucketListingRef requests."""
def __init__(self, reason):
StandardError.__init__(self)
self.reason = reason
def __repr__(self):
return 'BucketListingRefException: %s' % self.reason
def __str__(self):
return 'BucketListingRefException: %s' % self.reason
| bsd-3-clause |
alheinecke/tensorflow-xsmm | tensorflow/contrib/session_bundle/exporter.py | 49 | 12341 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Export a TensorFlow model.
See: go/tf-exporter
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
import six
from google.protobuf.any_pb2 import Any
from tensorflow.contrib.session_bundle import constants
from tensorflow.contrib.session_bundle import gc
from tensorflow.contrib.session_bundle import manifest_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
from tensorflow.python.util.deprecation import deprecated
@deprecated("2017-06-30", "Please use SavedModel instead.")
def gfile_copy_callback(files_to_copy, export_dir_path):
"""Callback to copy files using `gfile.Copy` to an export directory.
This method is used as the default `assets_callback` in `Exporter.init` to
copy assets from the `assets_collection`. It can also be invoked directly to
copy additional supplementary files into the export directory (in which case
it is not a callback).
Args:
files_to_copy: A dictionary that maps original file paths to desired
basename in the export directory.
export_dir_path: Directory to copy the files to.
"""
logging.info("Write assets into: %s using gfile_copy.", export_dir_path)
gfile.MakeDirs(export_dir_path)
for source_filepath, basename in files_to_copy.items():
new_path = os.path.join(
compat.as_bytes(export_dir_path), compat.as_bytes(basename))
logging.info("Copying asset %s to path %s.", source_filepath, new_path)
if gfile.Exists(new_path):
# Guard against being restarted while copying assets, and the file
# existing and being in an unknown state.
# TODO(b/28676216): Do some file checks before deleting.
logging.info("Removing file %s.", new_path)
gfile.Remove(new_path)
gfile.Copy(source_filepath, new_path)
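# The callback can also be invoked directly, e.g. (sketch; paths illustrative):
#   gfile_copy_callback({'/tmp/vocab.txt': 'vocab.txt'}, export_dir_path)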
@deprecated("2017-06-30", "Please use SavedModel instead.")
def regression_signature(input_tensor, output_tensor):
"""Creates a regression signature.
Args:
input_tensor: Tensor specifying the input to a graph.
output_tensor: Tensor specifying the output of a graph.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
signature.regression_signature.input.tensor_name = input_tensor.name
signature.regression_signature.output.tensor_name = output_tensor.name
return signature
@deprecated("2017-06-30", "Please use SavedModel instead.")
def classification_signature(input_tensor,
classes_tensor=None,
scores_tensor=None):
"""Creates a classification signature.
Args:
input_tensor: Tensor specifying the input to a graph.
classes_tensor: Tensor specifying the output classes of a graph.
scores_tensor: Tensor specifying the scores of the output classes.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
signature.classification_signature.input.tensor_name = input_tensor.name
if classes_tensor is not None:
signature.classification_signature.classes.tensor_name = classes_tensor.name
if scores_tensor is not None:
signature.classification_signature.scores.tensor_name = scores_tensor.name
return signature
@deprecated("2017-06-30", "Please use SavedModel instead.")
def generic_signature(name_tensor_map):
"""Creates a generic signature of name to Tensor name.
Args:
name_tensor_map: Map from logical name to Tensor.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
for name, tensor in six.iteritems(name_tensor_map):
signature.generic_signature.map[name].tensor_name = tensor.name
return signature
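# Typical end-to-end use (sketch; `sess`, `x`, `y` and `global_step` are
# assumed to exist in the caller's graph, they are not defined here):
#   saver = tf_saver.Saver(sharded=True)
#   model_exporter = Exporter(saver)
#   model_exporter.init(default_graph_signature=regression_signature(x, y))
#   model_exporter.export(export_dir_base, global_step, sess)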
class Exporter(object):
"""Exporter helps package a TensorFlow model for serving.
Args:
saver: Saver object.
"""
def __init__(self, saver):
# Makes a copy of the saver-def and disables garbage-collection, since the
# exporter enforces garbage-collection independently. Specifically, since
# the exporter performs atomic copies of the saver output, it is required
# that garbage-collection via the underlying saver be disabled.
saver_def = saver.as_saver_def()
saver_def.ClearField("max_to_keep")
self._saver = tf_saver.Saver(saver_def=saver_def)
self._has_init = False
self._assets_to_copy = {}
@deprecated("2017-06-30", "Please use SavedModel instead.")
def init(self,
graph_def=None,
init_op=None,
clear_devices=False,
default_graph_signature=None,
named_graph_signatures=None,
assets_collection=None,
assets_callback=gfile_copy_callback):
"""Initialization.
Args:
graph_def: A GraphDef message of the graph to be used in inference.
GraphDef of default graph is used when None.
init_op: Op to be used in initialization.
clear_devices: If device info of the graph should be cleared upon export.
default_graph_signature: Default signature of the graph.
named_graph_signatures: Map of named input/output signatures of the graph.
assets_collection: A collection of constant asset filepath tensors. If set
the assets will be exported into the asset directory.
assets_callback: callback with two argument called during export with the
list of files to copy and the asset path.
Raises:
RuntimeError: if init is called more than once.
TypeError: if init_op is not an Operation or None.
ValueError: if asset file path tensors are not non-empty constant string
scalar tensors.
"""
    # Avoid dangerous default value [] (a mutable default argument)
if named_graph_signatures is None:
named_graph_signatures = {}
assets = []
if assets_collection:
for asset_tensor in assets_collection:
asset_filepath = self._file_path_value(asset_tensor)
if not asset_filepath:
raise ValueError("invalid asset filepath tensor %s" % asset_tensor)
basename = os.path.basename(asset_filepath)
assets.append((basename, asset_tensor))
self._assets_to_copy[asset_filepath] = basename
if self._has_init:
raise RuntimeError("init should be called only once")
self._has_init = True
if graph_def or clear_devices:
copy = graph_pb2.GraphDef()
if graph_def:
copy.CopyFrom(graph_def)
else:
copy.CopyFrom(ops.get_default_graph().as_graph_def())
if clear_devices:
for node in copy.node:
node.device = ""
graph_any_buf = Any()
graph_any_buf.Pack(copy)
ops.add_to_collection(constants.GRAPH_KEY, graph_any_buf)
if init_op:
if not isinstance(init_op, ops.Operation):
raise TypeError("init_op needs to be an Operation: %s" % init_op)
ops.add_to_collection(constants.INIT_OP_KEY, init_op)
signatures_proto = manifest_pb2.Signatures()
if default_graph_signature:
signatures_proto.default_signature.CopyFrom(default_graph_signature)
for signature_name, signature in six.iteritems(named_graph_signatures):
signatures_proto.named_signatures[signature_name].CopyFrom(signature)
signatures_any_buf = Any()
signatures_any_buf.Pack(signatures_proto)
ops.add_to_collection(constants.SIGNATURES_KEY, signatures_any_buf)
for filename, tensor in assets:
asset = manifest_pb2.AssetFile()
asset.filename = filename
asset.tensor_binding.tensor_name = tensor.name
asset_any_buf = Any()
asset_any_buf.Pack(asset)
ops.add_to_collection(constants.ASSETS_KEY, asset_any_buf)
self._assets_callback = assets_callback
@deprecated("2017-06-30", "Please use SavedModel instead.")
def export(self,
export_dir_base,
global_step_tensor,
sess=None,
exports_to_keep=None):
"""Exports the model.
Args:
export_dir_base: A string path to the base export dir.
global_step_tensor: An Tensor or tensor name providing the
global step counter to append to the export directory path and set
in the manifest version.
sess: A Session to use to save the parameters.
exports_to_keep: a gc.Path filter function used to determine the set of
exports to keep. If set to None, all versions will be kept.
Returns:
The string path to the exported directory.
Raises:
RuntimeError: if init is not called.
RuntimeError: if the export would overwrite an existing directory.
"""
if not self._has_init:
raise RuntimeError("init must be called first")
# Export dir must not end with / or it will break exports to keep. Strip /.
if export_dir_base.endswith("/"):
export_dir_base = export_dir_base[:-1]
global_step = training_util.global_step(sess, global_step_tensor)
export_dir = os.path.join(
compat.as_bytes(export_dir_base),
compat.as_bytes(constants.VERSION_FORMAT_SPECIFIER % global_step))
# Prevent overwriting on existing exports which could lead to bad/corrupt
# storage and loading of models. This is an important check that must be
# done before any output files or directories are created.
if gfile.Exists(export_dir):
raise RuntimeError("Overwriting exports can cause corruption and are "
"not allowed. Duplicate export dir: %s" % export_dir)
# Output to a temporary directory which is atomically renamed to the final
# directory when complete.
tmp_export_dir = compat.as_text(export_dir) + "-tmp"
gfile.MakeDirs(tmp_export_dir)
self._saver.save(sess,
os.path.join(
compat.as_text(tmp_export_dir),
compat.as_text(constants.EXPORT_BASE_NAME)),
meta_graph_suffix=constants.EXPORT_SUFFIX_NAME)
# Run the asset callback.
if self._assets_callback and self._assets_to_copy:
assets_dir = os.path.join(
compat.as_bytes(tmp_export_dir),
compat.as_bytes(constants.ASSETS_DIRECTORY))
gfile.MakeDirs(assets_dir)
self._assets_callback(self._assets_to_copy, assets_dir)
# TODO(b/27794910): Delete *checkpoint* file before rename.
gfile.Rename(tmp_export_dir, export_dir)
if exports_to_keep:
# create a simple parser that pulls the export_version from the directory.
def parser(path):
match = re.match("^" + export_dir_base + "/(\\d{8})$", path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
paths_to_delete = gc.negation(exports_to_keep)
for p in paths_to_delete(gc.get_paths(export_dir_base, parser=parser)):
gfile.DeleteRecursively(p.path)
return export_dir
def _file_path_value(self, path_tensor):
"""Returns the filepath value stored in constant `path_tensor`."""
if not isinstance(path_tensor, ops.Tensor):
raise TypeError("tensor is not a Tensor")
if path_tensor.op.type != "Const":
raise TypeError("Only constants tensor are supported")
if path_tensor.dtype != dtypes.string:
raise TypeError("File paths should be string")
str_value = path_tensor.op.get_attr("value").string_val
if len(str_value) != 1:
raise TypeError("Only scalar tensors are supported")
return str_value[0]
| apache-2.0 |
F0rth/seafile-obsd-wip | web/main.py | 1 | 35100 | #!/usr/bin/env python2
# encoding: utf-8
import gettext
import locale
import os
import simplejson as json
import sys
import platform
import urllib
import web
from web.contrib.template import render_mako
import settings
from seaserv import CCNET_CONF_PATH
from seaserv import ccnet_rpc, seafile_rpc, applet_rpc
from seaserv import get_peers_by_role
from seaserv import get_repos, get_repo, get_commits, \
get_branches, open_dir, get_diff, \
get_default_seafile_worktree, \
get_current_prefs
from pysearpc import SearpcError
urls = (
'/', 'repos',
'/opendir/', 'open_directory',
'/home/', 'repos',
'/repos/', 'repos',
'/repo/', 'repo',
'/repo/history/', 'repo_history',
'/repo/setting/', 'repo_setting',
'/repo/sync-status/', 'repo_sync_status',
'/repo/transfer/', 'repo_transfer',
'/repos/download-tasks/', 'CloneTasks',
'/repos/clone-tasks/', 'clone_tasks',
'/repo/download/', 'repo_download',
'/repo/sync/', 'repo_sync',
'/repos/operation/', 'repo_operation',
'/procs/', 'procs',
'/settings/', 'settings_page',
'/i18n/', 'i18n',
'/seafile_access_check/', 'seafile_access_check',
'/open-local-file/', 'open_local_file',
'/seafile_rpc_version/', 'seafile_rpc_version',
)
# See http://www.py2exe.org/index.cgi/WhereAmI
if 'win32' in sys.platform and hasattr(sys, 'frozen'):
__file__ = sys.executable
curdir = os.path.abspath(os.path.dirname(__file__))
localedir = os.path.join(curdir, 'i18n')
if "darwin" == sys.platform and hasattr(sys, 'frozen'):
sys.path.append(curdir)
NET_STATE_CONNECTED = 1
lang_code = locale.getdefaultlocale()[0]
if lang_code == 'zh_CN':
DEFAULT_LANG = 'zh_CN'
else:
DEFAULT_LANG = 'en_US'
lang_in_use = None
gettext.install('messages', localedir, unicode=True)
gettext.translation('messages', localedir,
languages=[DEFAULT_LANG]).install(True)
render = render_mako(directories=['templates'],
output_encoding='utf-8', input_encoding='utf-8',
default_filters=['decode.utf8'])
app = web.application(urls, globals())
SEAFILE_VERSION = '1.7'
default_options = { "confdir": CCNET_CONF_PATH,
'web_ctx': web.ctx,
'seafile_version': SEAFILE_VERSION,
'lang': DEFAULT_LANG,
'settings': settings,
}
def get_relay_of_repo(repo):
if not repo:
return None
relay = None
try:
if repo.props.relay_id:
relay = ccnet_rpc.get_peer(repo.props.relay_id)
except:
return None
return relay
def get_dir_nav_links(repo, commit_id, path):
"""Get every folder on the path from repo root to [path]. Return value is
in this format:
[(root, href-to-root), (level-1-folder, href-to-level-1), ... (path, href-to-path)]
"""
names = []
links = []
if path != u'/':
names = path[1:].split(u'/')
for idx,name in enumerate(names):
current_path = u'/' + u'/'.join(names[:idx+1])
quoted_path = urllib.quote(current_path.encode('utf-8'))
href = "/repos/operation/?repo=%s&commit_id=%s&path=%s&op=dir" \
% (repo.props.id, commit_id, quoted_path)
links.append(href)
# insert root link in the front
names.insert(0, repo.props.name)
href = "/repos/operation/?repo=%s&commit_id=%s&op=dir" % (repo.props.id, commit_id)
links.insert(0, href)
return zip(names, links)
class open_directory:
def GET(self):
path = web.webapi.input(path='').path
if path:
open_dir(path)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def prepare_repo_info(repo):
"""Get various types of information belong to the repo."""
### get branch information
repo.branches = get_branches(repo.props.id)
repo.current_branch = None
repo.master_branch = None
repo.local_branch = None
for branch in repo.branches:
if branch.props.name == "master":
repo.master_branch = branch
elif branch.props.name == "local":
repo.local_branch = branch
if branch.props.name == repo.props.head_branch:
repo.current_branch = branch
### transfer task information and sync info
repo.sync_info = seafile_rpc.get_repo_sync_info(repo.props.id)
class repos:
def show_repos(self):
# relay info
relays = get_peers_by_role ("MyRelay")
# remove name unresolved relay
relays = [relay for relay in relays if relay.name]
# get repos info
repos = get_repos()
for repo in repos:
# is_broken is not used now, we should clean it later
repo.is_broken = False
try:
prepare_repo_info(repo)
except SearpcError, e:
repo.is_broken = True
repo.error_msg = e.msg
for relay in relays:
relay.repos = []
for repo in repos:
if relay.props.id == repo.props.relay_id:
relay.repos.append(repo)
repo.relay = relay
repos.sort(key=lambda x: x.props.last_modify, reverse=True)
return render.repos(repos=repos,
relays=relays,
**default_options)
def GET(self):
# Set language preference on the first load of home page
global lang_in_use
if not lang_in_use:
lang_in_use = seafile_rpc.get_config('lang_in_use')
if not lang_in_use:
seafile_rpc.set_config('lang_in_use', DEFAULT_LANG)
lang_in_use = DEFAULT_LANG
gettext.translation('messages', localedir,
languages=[lang_in_use]).install(True)
default_options['lang'] = lang_in_use
return self.show_repos()
class repo:
"""Show a specific repo."""
def show_repo(self, repo_id):
repo = seafile_rpc.get_repo(repo_id)
if not repo:
return render.repo_missing(repo_id=repo_id, **default_options)
try:
prepare_repo_info(repo)
recent_commits = get_commits(repo_id, 0, 3)
repo.is_broken = False
        except SearpcError as e:
repo.is_broken = True
recent_commits = []
repo.error_msg = e.msg
relay = get_relay_of_repo(repo)
relay_addr = seafile_rpc.get_repo_relay_address(repo_id)
relay_port = seafile_rpc.get_repo_relay_port(repo_id)
return render.repo(repo=repo,
recent_commits=recent_commits,
relay=relay,
relay_addr=relay_addr,
relay_port=relay_port,
**default_options)
def GET(self):
inputs = web.webapi.input(repo='')
return self.show_repo(inputs.repo)
class repo_history:
def show_repo_history(self, repo_id):
repo = seafile_rpc.get_repo(repo_id)
prepare_repo_info(repo)
inputs = web.webapi.input(page="1", per_page="25")
current_page = int(inputs.page)
per_page = int(inputs.per_page)
commits_all = get_commits(repo_id, per_page * (current_page - 1), per_page + 1)
commits = commits_all[:per_page]
if len(commits_all) == per_page + 1:
page_next = True
else:
page_next = False
return render.repo_history(repo=repo,
commits=commits,
current_page=current_page,
per_page=per_page,
page_next=page_next,
**default_options)
def GET(self):
inputs = web.webapi.input(repo='')
return self.show_repo_history(inputs.repo)
class repo_transfer:
def GET(self):
inputs = web.webapi.input(repo='')
task = {}
t = seafile_rpc.find_transfer_task(inputs.repo)
if t:
task['ttype'] = t.props.ttype
task['state'] = t.props.state
task['rt_state'] = t.props.rt_state
task['block_done'] = t.props.block_done
task['block_total'] = t.props.block_total
task['rate'] = t.props.rate
task['error_str'] = t.props.error_str
return json.dumps(task)
class repo_sync_status:
def GET(self):
inputs = web.webapi.input(repo='')
sync_status = {}
repo = get_repo(inputs.repo)
if not repo or not repo.props.worktree or not repo.props.head_branch:
return json.dumps(sync_status)
relay = get_relay_of_repo(repo)
if relay:
if not relay.props.is_ready:
if relay.net_state != NET_STATE_CONNECTED:
sync_status['state'] = 'relay not connected'
else:
sync_status['state'] = 'relay authenticating'
return json.dumps(sync_status)
t = seafile_rpc.get_repo_sync_task(inputs.repo)
if t:
if t.props.state == 'error' and t.props.error == 'relay not connected':
                # Hide the 'relay not connected' error from the daemon when
                # the relay is actually connected but the sync-check pulse has
                # not arrived yet
sync_status['state'] = 'waiting for sync'
return json.dumps(sync_status)
elif t.props.state == 'canceled' or t.props.state == 'cancel pending':
sync_status['state'] = 'waiting for sync'
else:
sync_status['state'] = t.props.state
sync_status['is_sync_lan'] = t.props.is_sync_lan
sync_status['error'] = t.props.error
else:
# No sync task yet: seafile maybe have just been started
sync_status['state'] = 'waiting for sync'
auto_sync_enabled = seafile_rpc.is_auto_sync_enabled()
if not auto_sync_enabled or not repo.props.auto_sync:
sync_status['state'] = 'auto sync is turned off'
return json.dumps(sync_status)
class repo_operation:
def perform_operation_get(self, op, repo_id):
repo = get_repo(repo_id)
if not repo:
raise web.seeother('/repos/')
if op == 'sync':
try:
seafile_rpc.sync(repo.props.id, None)
except:
pass
elif op == 'open' and repo.props.worktree:
try:
open_dir(repo.props.worktree.encode('utf-8'))
except:
pass
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
elif op == 'open_file':
            quote_file_path = web.webapi.input(file_path='').file_path
            file_path = quote_file_path.encode('utf-8')
dir_path = file_path
if os.path.exists(file_path) and os.path.isfile(file_path):
dir_path = os.path.dirname(file_path)
try:
open_dir(dir_path)
except:
pass
return render.checkout_msg(repo=repo, file_path=file_path, **default_options)
elif op == 'diff':
inputs = web.webapi.input(old='', new='')
new_commit = seafile_rpc.get_commit(inputs.new)
if inputs.old != '':
old_commit = seafile_rpc.get_commit(inputs.old)
else:
old_commit = None
(new, removed, renamed, modified, newdir, deldir) = get_diff(repo_id, inputs.old, inputs.new)
return render.repo_diff(repo=repo,
new=new, removed=removed,
renamed=renamed, modified=modified,
newdir=newdir, deldir=deldir,
new_commit=new_commit, old_commit=old_commit,
**default_options)
elif op == 'lsch':
inputs = web.webapi.input(old='', new='')
(new, removed, renamed, modified, newdir, deldir) = get_diff(repo_id, inputs.old, inputs.new)
ch = {}
ch['new'] = new
ch['removed'] = removed
ch['renamed'] = renamed
ch['modified'] = modified
ch['newdir'] = newdir
ch['deldir'] = deldir
return json.dumps(ch)
elif op == 'dir':
inputs = web.webapi.input(commit_id='', path='/')
dirs = seafile_rpc.list_dir_by_path(inputs.commit_id, inputs.path.encode('utf-8'))
navs = get_dir_nav_links(repo, inputs.commit_id, inputs.path)
try:
commit = seafile_rpc.get_commit(inputs.commit_id)
except SearpcError:
raise web.seeother('/repo/?repo=%s' % repo_id)
return render.repo_dir(repo=repo, dirs=dirs, commit_id=inputs.commit_id,
commit=commit,
navs=navs,
path=inputs.path,
**default_options)
elif op == 'remove':
try:
seafile_rpc.remove_repo(repo_id)
except:
pass
raise web.seeother('/repos/')
elif op == 'set-auto-sync':
auto_sync = {}
try:
seafile_rpc.set_repo_property(repo_id, "auto-sync", "true")
except:
pass
auto_sync['start'] = True
return json.dumps(auto_sync)
elif op == 'set-manual-sync':
auto_sync = {}
try:
seafile_rpc.set_repo_property(repo_id, "auto-sync", "false")
except:
pass
auto_sync['start'] = False
return json.dumps(auto_sync)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def perform_operation_post(self, op, repo_id):
repo = get_repo(repo_id)
if not repo:
raise web.seeother('/repos/')
if op == 'modify-relay':
relay_id = web.webapi.input(relay_id="").relay_id
if relay_id != repo.props.relay_id:
seafile_rpc.set_repo_property(repo.props.id,
"relay-id", relay_id)
elif op == 'set-passwd':
passwd = web.webapi.input(passwd="").passwd
if passwd:
seafile_rpc.set_repo_passwd(repo.props.id, passwd)
elif op == 'edit-relay':
inputs = web.webapi.input(relay_addr='', relay_port='')
if inputs.relay_addr and inputs.relay_port:
seafile_rpc.update_repo_relay_info(repo_id,
inputs.relay_addr,
inputs.relay_port)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def GET(self):
inputs = web.webapi.input(op='', repo='')
if inputs.op and inputs.repo:
return self.perform_operation_get(inputs.op, inputs.repo)
raise web.seeother('/repos/')
def POST(self):
inputs = web.webapi.input(op='', repo='')
if inputs.op and inputs.repo:
return self.perform_operation_post(inputs.op, inputs.repo)
raise web.seeother('/repos/')
class CloneTasks:
def GET(self):
inputs = web.webapi.input(op='', repo_id='')
if inputs.op and inputs.repo_id:
if inputs.op == "remove":
seafile_rpc.remove_clone_task(inputs.repo_id)
elif inputs.op == "cancel":
seafile_rpc.cancel_clone_task(inputs.repo_id)
raise web.seeother('/repos/download-tasks/')
return render.clone_tasks(**default_options)
class clone_tasks:
def GET(self):
ts = []
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
t = {}
t['repo_id'] = task.props.repo_id
t['repo_name'] = task.props.repo_name
t['state'] = task.props.state
t['error_str'] = task.props.error_str
t['worktree'] = task.props.worktree
tx_task = False
checkout_task = False
if task.props.state == "fetch":
tx_task = seafile_rpc.find_transfer_task(task.props.repo_id)
t['tx_block_done'] = tx_task.props.block_done
t['tx_block_total'] = tx_task.props.block_total
elif task.props.state == "checkout":
checkout_task = seafile_rpc.get_checkout_task(task.props.repo_id)
t['checkout_finished_files'] = checkout_task.props.finished_files
t['checkout_total_files'] = checkout_task.props.total_files
elif task.props.state == "error" and task.props.error_str == "fetch":
tx_task = seafile_rpc.find_transfer_task(task.props.repo_id)
t['tx_error_str'] = tx_task.props.error_str
elif task.props.state == "error" and task.props.error_str == "password":
t['relay_id'] = task.props.peer_id
ts.append(t)
Tasks = {}
Tasks['tasks'] = ts
return json.dumps(Tasks)
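# Illustrative JSON response (all values hypothetical):
#   {"tasks": [{"repo_id": "...", "repo_name": "docs", "state": "fetch",
#               "error_str": "", "worktree": "/home/user/docs",
#               "tx_block_done": 10, "tx_block_total": 128}]}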
class repo_download:
def GET(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', magic='', email='')
relay_id = inputs.relay_id
token = inputs.token
relay_addr = inputs.relay_addr
relay_port = inputs.relay_port
repo_id = inputs.repo_id
repo_name = inputs.repo_name
email = inputs.email
if seafile_rpc.get_repo(inputs.repo_id):
return render.repo_download(repo_already_exists=True,
**default_options)
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
if task.props.repo_id == inputs.repo_id:
if task.props.state != 'done' and task.props.state != 'error' \
and task.props.state != 'canceled':
raise web.seeother('/repos/download-tasks/')
wt_parent = get_default_seafile_worktree ()
sync_url = "/repo/sync/?relay_id=%s&relay_addr=%s&relay_port=%s&" \
"email=%s&token=%s&repo_id=%s&repo_name=%s" % \
(relay_id, relay_addr, relay_port, urllib.quote(email), token, repo_id,
urllib.quote(repo_name.encode('utf-8')))
if inputs.encrypted:
sync_url += "&encrypted=1&magic=%s" % inputs.magic
return render.repo_download(error_msg=None,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
token=token,
relay_addr=relay_addr,
relay_port=relay_port,
repo_name=repo_name,
wt_parent=wt_parent,
encrypted=inputs.encrypted,
magic=inputs.magic,
email=email,
sync_url=sync_url,
**default_options)
def POST(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', password='', magic='',
wt_parent='', email='')
sync_url = "/repo/sync/?relay_id=%s&relay_addr=%s&relay_port=%s&" \
"email=%s&token=%s&repo_id=%s&repo_name=%s" % \
(inputs.relay_id, inputs.relay_addr, inputs.relay_port,
urllib.quote(inputs.email), inputs.token, inputs.repo_id,
urllib.quote(inputs.repo_name.encode('utf-8')))
if inputs.encrypted:
sync_url += "&encrypted=1&magic=%s" % inputs.magic
error_msg = None
if not inputs.wt_parent:
error_msg = _("You must choose a local directory")
        elif inputs.encrypted and not inputs.password:
            error_msg = _("Password cannot be empty")
        elif len(inputs.repo_id) != 36:
            error_msg = _("Invalid Repo ID")
if error_msg:
return render.repo_download (error_msg=error_msg,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
wt_parent=inputs.wt_parent,
email=inputs.email,
sync_url=sync_url,
**default_options)
if not inputs.password:
inputs.password = None
if not inputs.magic:
inputs.magic = None
try:
seafile_rpc.download (inputs.repo_id, inputs.relay_id,
inputs.repo_name.encode('utf-8'),
inputs.wt_parent.encode('utf-8'),
inputs.token,
inputs.password,
inputs.magic,
inputs.relay_addr,
inputs.relay_port,
inputs.email)
except SearpcError as e:
if e.msg == 'Invalid local directory':
error_msg = _('Invalid local directory')
elif e.msg == 'Already in sync':
error_msg = _('The local directory you chose is in sync with another repo. Please choose another one.')
            elif e.msg == 'Worktree conflicts system path':
                error_msg = _('The local directory you chose cannot be under, or contain, a system directory of seafile.')
            elif e.msg == 'Worktree conflicts existing repo':
                error_msg = _('The local directory you chose cannot be under, or contain, another library.')
elif e.msg == 'Incorrect password':
error_msg = _('Incorrect password.')
else:
error_msg = _('Internal error.') + str(e)
if error_msg:
return render.repo_download (error_msg=error_msg,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
password=inputs.password,
magic=inputs.magic,
wt_parent=inputs.wt_parent,
email=inputs.email,
sync_url=sync_url,
**default_options)
raise web.seeother('/repos/download-tasks/')
class repo_sync:
def GET(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', magic='', email='')
relay_id = inputs.relay_id
token = inputs.token
relay_addr = inputs.relay_addr
relay_port = inputs.relay_port
repo_id = inputs.repo_id
repo_name = inputs.repo_name
email = inputs.email
if seafile_rpc.get_repo(inputs.repo_id):
return render.repo_sync(repo_already_exists=True, **default_options)
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
if task.props.repo_id == inputs.repo_id:
if task.props.state != 'done' and task.props.state != 'error' \
and task.props.state != 'canceled':
raise web.seeother('/repos/download-tasks/')
return render.repo_sync(error_msg=None,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
token=token,
relay_addr=relay_addr,
relay_port=relay_port,
repo_name=repo_name,
worktree='',
encrypted=inputs.encrypted,
magic=inputs.magic,
email=email,
**default_options)
def POST(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', password='', magic='',
worktree='', email='')
repo_id = inputs.repo_id.strip()
error_msg = None
if not inputs.worktree:
error_msg = _("You must choose a local directory")
        elif inputs.encrypted and not inputs.password:
            error_msg = _("Password cannot be empty")
        elif len(repo_id) != 36:
            error_msg = _("Invalid Repo ID")
if error_msg:
return render.repo_sync (error_msg=error_msg,
repo_already_exists=False,
repo_id=repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
worktree=inputs.worktree,
email=inputs.email,
**default_options)
if not inputs.password:
inputs.password = None
if not inputs.magic:
inputs.magic = None
try:
seafile_rpc.clone (repo_id, inputs.relay_id,
inputs.repo_name.encode('utf-8'),
inputs.worktree.encode('utf-8'),
inputs.token,
inputs.password,
inputs.magic,
inputs.relay_addr, inputs.relay_port, inputs.email)
except SearpcError as e:
if e.msg == 'Invalid local directory':
error_msg = _('Invalid local directory')
elif e.msg == 'Already in sync':
error_msg = _('The local directory you chose is in sync with another repo. Please choose another one.')
            elif e.msg == 'Worktree conflicts system path':
                error_msg = _('The local directory you chose cannot be under, or contain, a system directory of seafile.')
            elif e.msg == 'Worktree conflicts existing repo':
                error_msg = _('The local directory you chose cannot be under, or contain, another library.')
elif e.msg == 'Incorrect password':
error_msg = _('Incorrect password.')
else:
error_msg = _('Internal error.') + str(e)
if error_msg:
return render.repo_sync (error_msg=error_msg,
repo_already_exists=False,
repo_id=repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
worktree=inputs.worktree,
email=inputs.email,
**default_options)
raise web.seeother('/repos/download-tasks/')
class settings_page:
def GET(self):
current_prefs = get_current_prefs()
return render.settings(prefs=current_prefs, **default_options)
def POST(self):
current_prefs = get_current_prefs()
inputs = web.webapi.input(auto_start='off', notify_sync='off',
encrypt_channel='off',
upload_limit='', download_limit='')
applet_rpc.set_auto_start(inputs.auto_start)
if inputs.notify_sync != current_prefs['notify_sync']:
seafile_rpc.set_config('notify_sync', inputs.notify_sync)
if inputs.encrypt_channel != current_prefs['encrypt_channel']:
ccnet_rpc.set_config('encrypt_channel', inputs.encrypt_channel)
if not inputs.upload_limit:
upload_limit = 0
else:
try:
upload_limit = int(inputs.upload_limit) * 1024
except:
upload_limit = 0
if not inputs.download_limit:
download_limit = 0
else:
try:
download_limit = int(inputs.download_limit) * 1024
except:
download_limit = 0
if upload_limit != current_prefs['upload_limit']:
seafile_rpc.set_upload_rate_limit(upload_limit)
if download_limit != current_prefs['download_limit']:
seafile_rpc.set_download_rate_limit(download_limit)
raise web.seeother('/settings/')
class procs:
def GET(self):
aprocs = ccnet_rpc.get_procs_alive(0, -1)
dprocs = ccnet_rpc.get_procs_dead(0, -1)
acnt = ccnet_rpc.count_procs_alive()
dcnt = ccnet_rpc.count_procs_dead()
return render.procs(aprocs=aprocs, dprocs=dprocs,
acnt=acnt, dcnt=dcnt, **default_options)
class i18n:
def GET(self):
global lang_in_use
if lang_in_use == 'zh_CN':
lang_in_use = 'en_US'
else:
lang_in_use = 'zh_CN'
gettext.translation('messages', localedir,
languages=[lang_in_use]).install(True)
seafile_rpc.set_config('lang_in_use', lang_in_use)
default_options['lang'] = lang_in_use
inputs = web.webapi.input(prev='/home/')
raise web.seeother(inputs.prev)
# for seahub repo download
class seafile_access_check:
"""For seahub to check whether local seafile is started when downloading a
repo. For a bug in the released server 0.9.5, here we need always return
2.
"""
def GET(self):
return 'xx(2)'
class seafile_rpc_version:
"""For the server to query current seafile client rpc version"""
def GET(self):
version = 1
return 'xx(%s)' % json.dumps(version)
class open_local_file:
"""
    Handle the cross-domain JSONP AJAX 'open-local-file' request from seahub.
"""
def GET(self):
inputs = web.webapi.input(repo_id='', path='', callback='', commit_id='')
repo_id, path, callback = inputs.repo_id, inputs.path.lstrip('/'), inputs.callback
d = {}
if not (repo_id and path and callback):
d['error'] = 'invalid request'
return '%s(%s)' % (inputs.callback, json.dumps(d))
try:
repo = get_repo(repo_id)
        except Exception as e:
d['error'] = str(e)
return '%s(%s)' % (inputs.callback, json.dumps(d))
else:
if not repo:
d['exists'] = False
return '%s(%s)' % (inputs.callback, json.dumps(d))
if inputs.commit_id:
if repo.head_cmmt_id != inputs.commit_id:
d['outdated'] = True
d['auto-sync'] = repo.auto_sync
return '%s(%s)' % (inputs.callback, json.dumps(d))
# ok, repo exists
file_path = os.path.join(repo.worktree, path)
uname = platform.platform()
err_msg = ''
if 'Windows' in uname:
try:
os.startfile(file_path)
            except WindowsError as e:
if e.winerror == 1155:
# windows error 1155: no default application for this file type
d['no_assoc'] = True
try:
# try to open the folder instead
os.startfile(os.path.dirname(file_path))
except:
pass
else:
err_msg = str(e)
elif 'Linux' in uname:
file_path = file_path.encode('utf-8')
try:
os.system('xdg-open "%s"' % file_path)
            except Exception as e:
err_msg = str(e)
elif 'Darwin' in uname:
            # on macOS, fall back to the 'open' command
file_path = file_path.encode('utf-8')
try:
os.system('open "%s"' % file_path)
            except Exception as e:
err_msg = str(e)
if err_msg:
d['error'] = err_msg
return '%s(%s)' % (inputs.callback, json.dumps(d))
if __name__ == "__main__":
app.run()
| gpl-3.0 |
dmort27/panphon | panphon/bin/align_wordlists.py | 1 | 2458 | #!/usr/bin/env python
from __future__ import print_function
import unicodecsv as csv
import argparse
import panphon
import Levenshtein
import munkres
import panphon.distance
from functools import partial
def levenshtein_dist(_, a, b):
return Levenshtein.distance(a, b)
def dogol_leven_dist(_, a, b):
return Levenshtein.distance(dist.map_to_dogol_prime(a),
dist.map_to_dogol_prime(b))
def feature_hamming_dist(dist, a, b):
return dist.feature_edit_distance(a, b)
def feature_weighted_dist(dist, a, b):
return dist.weighted_feature_edit_distance(a, b)
def construct_cost_matrix(words_a, words_b, dist):
def matrix_row(word_a, words_b):
return [dist(word_a, word_b) for (word_b, _) in words_b]
return [matrix_row(word_a, words_b) for (word_a, _) in words_a]
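# Each row of the cost matrix corresponds to a word from the first list and
# each column to a word from the second; entry (i, j) is the distance between
# word i and word j under the chosen metric, which the Hungarian algorithm
# below then minimizes over one-to-one assignments.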
def score(indices):
pairs, errors = 0, 0
for row, column in indices:
pairs += 1
if row != column:
errors += 1
return pairs, errors
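# Illustrative example: for an assignment [(0, 0), (1, 2), (2, 1)], the two
# off-diagonal pairs count as misalignments, so score(...) returns (3, 2).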
def main(wordlist1, wordlist2, dist_funcs):
with open(wordlist1, 'rb') as file_a, open(wordlist2, 'rb') as file_b:
reader_a = csv.reader(file_a, encoding='utf-8')
reader_b = csv.reader(file_b, encoding='utf-8')
print('Reading word lists...')
words = zip([(w, g) for (g, w) in reader_a],
[(w, g) for (g, w) in reader_b])
words_a, words_b = zip(*[(a, b) for (a, b) in words if a and b])
print('Constructing cost matrix...')
matrix = construct_cost_matrix(words_a, words_b, dist_funcs)
m = munkres.Munkres()
print('Computing matrix using Hungarian Algorithm...')
indices = m.compute(matrix)
print(score(indices))
print('Done.')
if __name__ == '__main__':
parser = argparse.ArgumentParser(usage='Align two lists of "cognates" using a specified distance metric.')
parser.add_argument('wordlists', nargs=2, help='Filenames of two wordlists in corresponding order.')
parser.add_argument('-d', '--dist', default='hamming', help='Distance metric (e.g. Hamming).')
args = parser.parse_args()
dists = {'levenshtein': levenshtein_dist,
'dogol-leven': dogol_leven_dist,
'hamming': feature_hamming_dist,
'weighted': feature_weighted_dist}
dist = panphon.distance.Distance()
dist_funcs = partial(dists[args.dist], dist)
main(args.wordlists[0], args.wordlists[1], dist_funcs)
| mit |
jounex/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/humanize/templatetags/humanize.py | 98 | 9276 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import re
from datetime import date, datetime
from decimal import Decimal
from django import template
from django.conf import settings
from django.template import defaultfilters
from django.utils.encoding import force_text
from django.utils.formats import number_format
from django.utils.translation import pgettext, ungettext, ugettext as _
from django.utils.timezone import is_aware, utc
register = template.Library()
@register.filter(is_safe=True)
def ordinal(value):
"""
Converts an integer to its ordinal as a string. 1 is '1st', 2 is '2nd',
3 is '3rd', etc. Works for any integer.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
suffixes = (_('th'), _('st'), _('nd'), _('rd'), _('th'), _('th'), _('th'), _('th'), _('th'), _('th'))
if value % 100 in (11, 12, 13): # special case
return "%d%s" % (value, suffixes[0])
return "%d%s" % (value, suffixes[value % 10])
@register.filter(is_safe=True)
def intcomma(value, use_l10n=True):
"""
Converts an integer to a string containing commas every three digits.
For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
"""
if settings.USE_L10N and use_l10n:
try:
if not isinstance(value, (float, Decimal)):
value = int(value)
except (TypeError, ValueError):
return intcomma(value, False)
else:
return number_format(value, force_grouping=True)
orig = force_text(value)
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
if orig == new:
return new
else:
return intcomma(new, use_l10n)
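# For example (with localization disabled), intcomma(4500000) -> '4,500,000',
# built up by repeated application of the regex above.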
# A tuple mapping standard large numbers to their converters
intword_converters = (
(6, lambda number: (
ungettext('%(value).1f million', '%(value).1f million', number),
ungettext('%(value)s million', '%(value)s million', number),
)),
(9, lambda number: (
ungettext('%(value).1f billion', '%(value).1f billion', number),
ungettext('%(value)s billion', '%(value)s billion', number),
)),
(12, lambda number: (
ungettext('%(value).1f trillion', '%(value).1f trillion', number),
ungettext('%(value)s trillion', '%(value)s trillion', number),
)),
(15, lambda number: (
ungettext('%(value).1f quadrillion', '%(value).1f quadrillion', number),
ungettext('%(value)s quadrillion', '%(value)s quadrillion', number),
)),
(18, lambda number: (
ungettext('%(value).1f quintillion', '%(value).1f quintillion', number),
ungettext('%(value)s quintillion', '%(value)s quintillion', number),
)),
(21, lambda number: (
ungettext('%(value).1f sextillion', '%(value).1f sextillion', number),
ungettext('%(value)s sextillion', '%(value)s sextillion', number),
)),
(24, lambda number: (
ungettext('%(value).1f septillion', '%(value).1f septillion', number),
ungettext('%(value)s septillion', '%(value)s septillion', number),
)),
(27, lambda number: (
ungettext('%(value).1f octillion', '%(value).1f octillion', number),
ungettext('%(value)s octillion', '%(value)s octillion', number),
)),
(30, lambda number: (
ungettext('%(value).1f nonillion', '%(value).1f nonillion', number),
ungettext('%(value)s nonillion', '%(value)s nonillion', number),
)),
(33, lambda number: (
ungettext('%(value).1f decillion', '%(value).1f decillion', number),
ungettext('%(value)s decillion', '%(value)s decillion', number),
)),
(100, lambda number: (
ungettext('%(value).1f googol', '%(value).1f googol', number),
ungettext('%(value)s googol', '%(value)s googol', number),
)),
)
@register.filter(is_safe=False)
def intword(value):
"""
Converts a large integer to a friendly text representation. Works best
for numbers over 1 million. For example, 1000000 becomes '1.0 million',
1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if value < 1000000:
return value
def _check_for_i18n(value, float_formatted, string_formatted):
"""
Use the i18n enabled defaultfilters.floatformat if possible
"""
if settings.USE_L10N:
value = defaultfilters.floatformat(value, 1)
template = string_formatted
else:
template = float_formatted
return template % {'value': value}
for exponent, converters in intword_converters:
large_number = 10 ** exponent
if value < large_number * 1000:
new_value = value / float(large_number)
return _check_for_i18n(new_value, *converters(new_value))
return value
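# For example (non-localized): intword(1200000) -> '1.2 million' and
# intword(1200000000) -> '1.2 billion'; values below one million are
# returned unchanged.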
@register.filter(is_safe=True)
def apnumber(value):
"""
For numbers 1-9, returns the number spelled out. Otherwise, returns the
number. This follows Associated Press style.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if not 0 < value < 10:
return value
return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value-1]
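# For example: apnumber(4) -> 'four', while apnumber(0) and apnumber(10)
# are returned unchanged.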
# Perform the comparison in the default time zone when USE_TZ = True
# (unless a specific time zone has been applied with the |timezone filter).
@register.filter(expects_localtime=True)
def naturalday(value, arg=None):
"""
    For date values that are tomorrow, today or yesterday compared to the
    present day, returns the representing string. Otherwise, returns a string
formatted according to settings.DATE_FORMAT.
"""
try:
tzinfo = getattr(value, 'tzinfo', None)
value = date(value.year, value.month, value.day)
except AttributeError:
# Passed value wasn't a date object
return value
except ValueError:
# Date arguments out of range
return value
today = datetime.now(tzinfo).date()
delta = value - today
if delta.days == 0:
return _('today')
elif delta.days == 1:
return _('tomorrow')
elif delta.days == -1:
return _('yesterday')
return defaultfilters.date(value, arg)
# This filter doesn't require expects_localtime=True because it deals properly
# with both naive and aware datetimes. Therefore avoid the cost of conversion.
@register.filter
def naturaltime(value):
"""
    For date and time values, shows how many seconds, minutes or hours ago
    it was compared to the current timestamp, as a representing string.
"""
if not isinstance(value, date): # datetime is a subclass of date
return value
now = datetime.now(utc if is_aware(value) else None)
if value < now:
delta = now - value
if delta.days != 0:
return pgettext(
'naturaltime', '%(delta)s ago'
) % {'delta': defaultfilters.timesince(value, now)}
elif delta.seconds == 0:
return _('now')
elif delta.seconds < 60:
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'a second ago', '%(count)s seconds ago', delta.seconds
) % {'count': delta.seconds}
elif delta.seconds // 60 < 60:
count = delta.seconds // 60
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'a minute ago', '%(count)s minutes ago', count
) % {'count': count}
else:
count = delta.seconds // 60 // 60
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'an hour ago', '%(count)s hours ago', count
) % {'count': count}
else:
delta = value - now
if delta.days != 0:
return pgettext(
'naturaltime', '%(delta)s from now'
) % {'delta': defaultfilters.timeuntil(value, now)}
elif delta.seconds == 0:
return _('now')
elif delta.seconds < 60:
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'a second from now', '%(count)s seconds from now', delta.seconds
) % {'count': delta.seconds}
elif delta.seconds // 60 < 60:
count = delta.seconds // 60
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'a minute from now', '%(count)s minutes from now', count
) % {'count': count}
else:
count = delta.seconds // 60 // 60
return ungettext(
# Translators: please keep a non-breaking space (U+00A0)
# between count and time unit.
'an hour from now', '%(count)s hours from now', count
) % {'count': count}
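# Illustrative outputs: a value 30 seconds in the past renders as
# '30 seconds ago', 90 seconds in the future as 'a minute from now', and
# anything at least a day away falls back to timesince/timeuntil,
# e.g. '2 days ago'.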
| apache-2.0 |
nvoron23/hue | apps/oozie/src/oozie/migrations/0009_auto__add_decision.py | 39 | 20608 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Decision'
db.create_table('oozie_decision', (
('node_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['oozie.Node'], unique=True, primary_key=True)),
))
db.send_create_signal('oozie', ['Decision'])
def backwards(self, orm):
# Deleting model 'Decision'
db.delete_table('oozie_decision')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oozie.coordinator': {
'Meta': {'object_name': 'Coordinator', '_ormbases': ['oozie.Job']},
'concurrency': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 11, 3, 12, 54, 7, 295114)'}),
'execution': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 31, 12, 54, 7, 295060)'}),
'throttle': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timeout': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']", 'null': 'True'})
},
'oozie.datainput': {
'Meta': {'object_name': 'DataInput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataoutput': {
'Meta': {'object_name': 'DataOutput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataset': {
'Meta': {'object_name': 'Dataset'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'done_flag': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 31, 12, 54, 7, 295858)'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'uri': ('django.db.models.fields.CharField', [], {'default': "'/data/${YEAR}${MONTH}${DAY}'", 'max_length': '1024'})
},
'oozie.decision': {
'Meta': {'object_name': 'Decision'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.distcp': {
'Meta': {'object_name': 'DistCp'},
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.end': {
'Meta': {'object_name': 'End'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.fork': {
'Meta': {'object_name': 'Fork'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.history': {
'Meta': {'object_name': 'History'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Job']"}),
'oozie_job_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'properties': ('django.db.models.fields.TextField', [], {}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'oozie.hive': {
'Meta': {'object_name': 'Hive'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.hive.defaults","value":"hive-default.xml"}]\''}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.java': {
'Meta': {'object_name': 'Java'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'args': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'blank': 'True'}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'java_opts': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'main_class': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.job': {
'Meta': {'object_name': 'Job'},
'deployment_dir': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_shared': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'parameters': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"}]\''}),
'schema_version': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'oozie.join': {
'Meta': {'object_name': 'Join'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.kill': {
'Meta': {'object_name': 'Kill'},
'message': ('django.db.models.fields.CharField', [], {'default': "'Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]'", 'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.link': {
'Meta': {'object_name': 'Link'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_node'", 'to': "orm['oozie.Node']"}),
'comment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_node'", 'to': "orm['oozie.Node']"})
},
'oozie.mapreduce': {
'Meta': {'object_name': 'Mapreduce'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.node': {
'Meta': {'object_name': 'Node'},
'children': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'parents'", 'symmetrical': 'False', 'through': "orm['oozie.Link']", 'to': "orm['oozie.Node']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'node_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']"})
},
'oozie.pig': {
'Meta': {'object_name': 'Pig'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.shell': {
'Meta': {'object_name': 'Shell'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'capture_output': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'command': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.sqoop': {
'Meta': {'object_name': 'Sqoop'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'oozie.ssh': {
'Meta': {'object_name': 'Ssh'},
'capture_output': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'command': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'oozie.start': {
'Meta': {'object_name': 'Start'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True'})
},
'oozie.streaming': {
'Meta': {'object_name': 'Streaming'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'mapper': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'reducer': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'oozie.workflow': {
'Meta': {'object_name': 'Workflow', '_ormbases': ['oozie.Job']},
'end': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'end_workflow'", 'blank': 'True', 'null': 'True', 'to': "orm['oozie.End']"}),
'is_single': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'start': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'start_workflow'", 'blank': 'True', 'null': 'True', 'to': "orm['oozie.Start']"})
}
}
complete_apps = ['oozie']
| apache-2.0 |
yausern/stlab | TimeDomain_v2/AWG_station.py | 2 | 13357 | # author: Wolfgang Pfaff
# modified by: Sarwan Peiter
"""
The driver for the AWG has already been written.
The next step is an interface that communicates with the driver.
A library for generating pulses is another useful interface to have.
"""
import time
import logging
import numpy as np
import struct
import os,sys
from datetime import datetime
import subprocess, threading
import itertools
import re, fnmatch
# some pulses use rounding when determining the correct sample at which to
# insert a particular value. this might require correct rounding -- the pulses
# are typically specified on short time scales, but the time unit we use is
# seconds. therefore we need a suitably chosen digit on which to round. 9 would
# round a pulse to 1 ns precision. 11 is 10 ps, and therefore probably beyond
# the lifetime of this code (no 10ps AWG available yet :))
SIGNIFICANT_DIGITS = 11
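# For example, a 2.5 ns sample time specified in seconds survives this
# rounding: round(2.4999999999e-9, SIGNIFICANT_DIGITS) == 2.5e-09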
# Make a station that represents your instrument; define all of its channels
# and the connections between them
# TODO: function which sets up AWG configuration
# modified by Sarwan Peiter
class AWG_Station():
"""
This object communicates with the AWG520 series
"""
# AWG = None
AWG_type = 'regular'
channels_ids = ['ch1', 'ch1_marker1', 'ch1_marker2',
'ch2', 'ch2_marker1', 'ch2_marker2']
AWG_sequence_cfg = {}
def __init__(self, AWG=None):
self.channels = {}
self.AWG = AWG
self.filename = None
print('init halfway')
if self.AWG is not None:
self.clock = float(self.AWG.get_clock())
print(type(self.clock))
print('init over')
# define channels to for connection to environment
def define_channels(self, id, name, type, delay, offset, high, low, active):
_doubles = []
# Check whether or not channels are already in use!
for c in self.channels:
if self.channels[c]['id'] == id:
logging.warning(
"Channel '%s' already in use, will overwrite." % id)
_doubles.append(c)
for c in _doubles:
del self.channels[c]
self.channels[name] = {'id': id,
'type': type,
'delay': delay,
'offset': offset,
'high': high,
'low': low,
'active': active}
# Get the channels names by id
def get_channel_names_by_id(self, id):
chans = {id: None, id+'_marker1': None, id+'_marker2': None}
for c in self.channels:
if self.channels[c]['id'] in chans:
chans[self.channels[c]['id']] = c
return chans
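    # Illustrative example (hypothetical channel definitions): with 'RF'
    # defined on id 'ch1' and 'RO_trigger' on 'ch1_marker1',
    # get_channel_names_by_id('ch1') returns
    # {'ch1': 'RF', 'ch1_marker1': 'RO_trigger', 'ch1_marker2': None}.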
def get_channel_name_by_id(self, id):
for c in self.channels:
if self.channels[c]['id'] == id:
return c
def get_used_channel_ids(self):
chans = []
for c in self.channels:
if self.channels[c]['active'] and \
self.channels[c]['id'][:3] not in chans:
chans.append(self.channels[c]['id'][:3])
return chans
# Make function which programs AWG
def get_awg_channel_cfg(self):
channel_cfg = {}
self.AWG.get_all()
def delete_all_waveforms(self):
self.AWG.clear_waveforms()
def program_awg(self, sequence,*elements,**kw):
"""
Upload a single file to the AWG (.awg) which contains all waveforms
AND sequence information (i.e. nr of repetitions, event jumps etc)
Advantage is that it's much faster, since sequence information is sent
to the AWG in a single file.
"""
self.AWG.stop()
self.AWG.set_status('off',1)
self.AWG.set_status('off',2)
# self.init_dir()
self.last_sequence = sequence
self.last_elements = elements
# making directory to store waveforms and sequences
        # old_timeout = self.AWG.timeout()  # what does this function do?
# self.AWG.timeout(max(180, old_timeout))
verbose = kw.pop('verbose', False)
debug = kw.pop('debug', False)
channels = kw.pop('channels', 'all')
loop = kw.pop('loop', False)
allow_non_zero_first_point_on_trigger_wait = kw.pop('allow_first_zero', False)
elt_cnt = len(elements)
chan_ids = self.get_used_channel_ids()
packed_waveforms = {}
        # Store offset settings to restore them after uploading the sequence.
# Note that this is the AWG setting offset, as distinct from the
# channel parameter offset.
elements_with_non_zero_first_points = []
# order the waveforms according to physical AWG channels and
# make empty sequences where necessary
for i, element in enumerate(elements):
if verbose:
print ("%d / %d: %s (%d samples)... " %\
(i+1, elt_cnt, element.name, element.samples()), end = ' ')
_t0 = time.time()
tvals, wfs = element.normalized_waveforms()
for id in chan_ids:
wfname = element.name + '_%s.wfm' % id
chan_wfs = {id: None, id+'_marker1': None, id+'_marker2': None}
grp = self.get_channel_names_by_id(id)
for sid in grp:
if grp[sid] != None and grp[sid] in wfs:
chan_wfs[sid] = wfs[grp[sid]]
if chan_wfs[sid][0] != 0.:
elements_with_non_zero_first_points.append(element.name)
else:
chan_wfs[sid] = np.zeros(element.samples())
# create wform files and send them to AWG
self.AWG.gen_waveform_files(chan_wfs[id],
chan_wfs[id+'_marker1'],
chan_wfs[id+'_marker2'], wfname,
int(element.clock))
# packed_waveforms[wfname] = self.test_send(chan_wfs[id],
# chan_wfs[id+'_marker1'],
# chan_wfs[id+'_marker2'], wfname,
# int(element.clock))
_t = time.time() - _t0
if verbose:
print ("finished in %.2f seconds." % _t)
# sequence programming
_t0 = time.time()
        if (sequence.element_count() > 8000):
            logging.warning("Error: trying to program '{:s}' ({:d} "
                            "element(s))...\n Sequence contains more than "
                            "8000 elements, aborting.".format(
                                sequence.name, sequence.element_count()))
            return
print("Programming '%s' (%d element(s)) \t"
% (sequence.name, sequence.element_count()), end=' ')
# determine which channels are involved in the sequence
if channels == 'all':
chan_ids = self.get_used_channel_ids()
else:
chan_ids = []
for c in channels:
if self.channels[c]['id'][:3] not in chan_ids:
chan_ids.append(self.channels[c]['id'][:3])
# Create lists with sequence information:
# wfname_l = list of waveform names [[wf1_ch1,wf2_ch1..],[wf1_ch2,wf2_ch2..],...]
# nrep_l = list specifying the number of reps for each seq element
# wait_l = idem for wait_trigger_state
# goto_l = idem for goto_state (goto is the element where it hops to in case the element is finished)
wfname_l = []
nrep_l = []
wait_l = []
goto_l = []
logic_jump_l = []
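        # Illustrative example (hypothetical two-element sequence): after the
        # loops below these could look like
        #   wfname_l     = [['rabi_ch1.wfm', 'ro_ch1.wfm'],
        #                   ['rabi_ch2.wfm', 'ro_ch2.wfm']]
        #   nrep_l       = [1, 100]   # repeat the second element 100 times
        #   wait_l       = [1, 0]     # first element waits for a trigger
        #   goto_l       = [0, 1]     # loop back to element 1 when finished
        #   logic_jump_l = [0, 0]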
for id in chan_ids:
#set all the waveforms
el_wfnames = []
# add all wf names of channel
for elt in sequence.elements:
el_wfnames.append(elt['wfname'] + '_%s.wfm' % id)
# should the name include id nr?
wfname_l.append(el_wfnames)
for elt in sequence.elements:
nrep_l.append(elt['repetitions'])
if (elt['repetitions'] < 1) or (elt['repetitions'] > 65536):
                raise Exception('pulsar: The number of repetitions of ' +
                                'AWG element "%s" is out of range. Valid '
                                % elt['wfname'] +
                                'range = 1 to 65536 ("%s" received)'
                                % elt['repetitions'])
if elt['goto_l'] != None:
goto_l.append(sequence.element_index(elt['goto_l']))
else:
goto_l.append(0)
if elt['jump_target'] != None:
logic_jump_l.append(sequence.element_index(elt['jump_target']))
else:
logic_jump_l.append(0)
if elt['trigger_wait']:
wait_l.append(1)
else:
wait_l.append(0)
if loop:
goto_l[-1] = 1
# setting jump modes and loading the djump table
if sequence.djump_table != None and self.AWG_type not in ['opt09']:
raise Exception('AWG Station: The AWG configured does not support dynamic jumping')
if self.AWG_type in ['opt09']:
# TODO as self.AWG_sequence_cfg no longer exists but is generated
# from the sequence_cfg file, make these set the values on the AWG
# itself.
if sequence.djump_table != None:
# self.AWG_sequence_cfg['EVENT_JUMP_MODE'] = 2 # DYNAMIC JUMP
print('AWG set to dynamical jump')
awg_djump_table = np.zeros(16, dtype='l')
for i in list(sequence.djump_table.keys()):
el_idx = sequence.element_index(sequence.djump_table[i])
awg_djump_table[i] = el_idx
# self.AWG_sequence_cfg['TABLE_JUMP_DEFINITION'] = awg_djump_table
else:
# self.AWG_sequence_cfg['EVENT_JUMP_MODE'] = 1 # EVENT JUMP
pass
if debug:
self.check_sequence_consistency(packed_waveforms,
wfname_l,
nrep_l, wait_l, goto_l,
logic_jump_l)
self.filename = sequence.name+'_FILE.seq'
# # Loading the sequence onto the AWG memory
self.AWG.gen_sequence_file(wfname_l[0],wfname_l[1],nrep_l,wait_l,goto_l,logic_jump_l,self.filename)
# self.test_send_sequence2(wfname_l[0],wfname_l[1],nrep_l,wait_l,goto_l,logic_jump_l,self.filename)
time.sleep(.1)
# # # Waits for AWG to be ready
self.AWG.sync_awg()
        self.finished = False
self.upload()
self.finished = True
_t = time.time() - _t0
self.AWG.set_sequence(self.filename)
print(" finished in %.2f seconds." % _t)
return 0
def AWGrun(self):
# default mode is triggered
self.AWG.write('*WAI')
self.AWG.set_run_mode('ENH')
self.AWG.set_status('on',1)
self.AWG.set_status('on',2)
self.AWG.start()
def upload(self,folder_path = None, timestamp = None):
# if folder_path is None:
# folder_path = os.getcwd()
# use_latest = True
# if timestamp is not None:
# use_latest = False
# dirname = fnmatch.filter(os.listdir(folder_path),"AwgFiles*")
# dirpath = None
# if use_latest:
# dirpath = os.path.join(os.getcwd(),dirname[-1])
# else:
# pattern = re.findall(r'\d+',timestamp)
# for dir in dirname:
# if pattern == re.findall(r'\d+',dir):
# dirpath = os.path.join(os.getcwd(),dir)
# if dirpath == None:
# raise IOError("Cannot find directory with timestamp {}".format(timestamp))
folder_path = os.getcwd()
dirpath = self.AWG.awg_file_dir
os.chdir(dirpath)
f = open('ftp.txt','w')
f.write('open 192.168.1.51\n')
f.write('\n')
f.write('\n')
f.write('binary\n')
f.write('mput "*.wfm"\n')
f.write('mput "*.seq"\n')
f.write('disconnect\n')
f.write('bye')
f.close()
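        # The script above drives the Windows command-line ftp client in batch
        # mode ('ftp -v -i -s:ftp.txt' below): it connects to the host at
        # 192.168.1.51 (assumed to be the AWG), switches to binary mode and
        # bulk-uploads all generated .wfm and .seq files.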
t = threading.Thread(target=self.animate)
t.start()
if subprocess.call('ftp -v -i -s:ftp.txt') == 0:
os.remove('ftp.txt')
        # change back to the parent directory after the transfer
        os.chdir(os.path.normpath(os.getcwd() + os.sep + os.pardir))
def animate(self):
sys.stdout.write('uploading waveforms ' + '...')
for c in itertools.cycle(['|', '/', '-', '\\']):
if self.finished:
break
            sys.stdout.write('\b' + c)  # overwrite the previous spinner char
sys.stdout.flush()
time.sleep(0.1)
sys.stdout.write('\rDone! ')
def check_sequence_consistency(self, packed_waveforms,
wfname_l,
nrep_l, wait_l, goto_l, logic_jump_l):
'''
Specific to the 2-channel Tektronix 520 where all channels are used.
'''
if not (len(wfname_l[0]) == len(wfname_l[1]) ==
len(nrep_l) == len(wait_l) == len(goto_l) ==
len(logic_jump_l)):
raise Exception('pulsar: sequence list of elements/properties has unequal length')
# def test_send(self,w,m1,m2,filename,clock):
# """
# Sends a complete waveform. All parameters need to be specified.
# choose a file extension 'wfm' (must end with .wfm)
# See also: resend_waveform()
# Input:
# w (float[numpoints]) : waveform
# m1 (int[numpoints]) : marker1
# m2 (int[numpoints]) : marker2
# filename (string) : filename
# clock (int) : frequency (Hz)
# Output:
# None
# """
# logging.debug(__name__ + ' : Generating wfm files %s for instrument' % filename)
# # Check for errors
# dim = len(w)
# if (not((len(w)==len(m1)) and ((len(m1)==len(m2))))):
# return 'error'
# m = m1 + np.multiply(m2,2)
# ws = b''
# for i in range(0,len(w)):
# ws = ws + struct.pack('<fB', w[i], int(m[i]))
# s1 = 'MAGIC 1000\r\n'
# s3 = ws
# s4 = 'CLOCK %.10e\r\n' % clock
# s2 = '#' + str(len(str(len(s3)))) + str(len(s3))
# mes = s1.encode('ASCII') + s2.encode('ASCII') + s3 + s4.encode('ASCII')
# with open(os.path.join(self.dir, filename), 'wb') as d:
# d.write(mes)
# d.close()
# def test_send_sequence2(self,wfs1,wfs2,rep,wait,goto,logic_jump,filename):
# '''
# Sends a sequence file
# Inputs (mandatory):
# wfs1: list of filenames for ch1 (all must end with .pat)
# wfs2: list of filenames for ch2 (all must end with .pat)
# rep: list
# wait: list
# goto: list
# logic_jump: list
# filename: name of output file (must end with .seq)
# Output:
# None
# '''
# logging.debug(__name__ + ' : Generating sequence %s for instrument' % filename)
# N = str(len(rep))
# s1 = 'MAGIC 3002\r\n'
# s3 = 'LINES %s\n'%N
# s4 = ''
# for k in range(len(rep)):
# s4 = s4+ '"%s","%s",%s,%s,%s,%s\r\n'%(wfs1[k],wfs2[k],rep[k],wait[k],goto[k],logic_jump[k])
# mes = s1.encode("ASCII") + s3.encode("ASCII")+ s4.encode("ASCII")
# with open(os.path.join(self.dir, filename), 'wb') as d:
# d.write(mes)
# d.close()
# def init_dir(self):
# print ( 'Initializing directory for AWG file transferring......' )
# self.dir = os.path.join(os.getcwd(),
# 'AwgFiles'+datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
# try:
# os.makedirs(self.dir)
# except OSError as e:
# if e.errno != errno.EEXIST:
# raise # This was not a "directory exist" error..
| gpl-3.0 |
stewartpark/django | django/contrib/gis/geos/base.py | 437 | 1280 | from ctypes import c_void_p
from django.contrib.gis.geos.error import GEOSException
class GEOSBase(object):
"""
Base object for GEOS objects that has a pointer access property
that controls access to the underlying C pointer.
"""
# Initially the pointer is NULL.
_ptr = None
# Default allowed pointer type.
ptr_type = c_void_p
# Pointer access property.
def _get_ptr(self):
# Raise an exception if the pointer isn't valid -- we don't
# want to be passing NULL pointers to routines, as that's
# very bad.
if self._ptr:
return self._ptr
else:
raise GEOSException('NULL GEOS %s pointer encountered.' % self.__class__.__name__)
def _set_ptr(self, ptr):
# Only allow the pointer to be set with pointers of the
# compatible type or None (NULL).
if ptr is None or isinstance(ptr, self.ptr_type):
self._ptr = ptr
else:
raise TypeError('Incompatible pointer type')
# Property for controlling access to the GEOS object pointers. Using
# this raises an exception when the pointer is NULL, thus preventing
# the C library from attempting to access an invalid memory location.
ptr = property(_get_ptr, _set_ptr)
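# Minimal usage sketch (illustrative only; DummyGeom is hypothetical and not
# part of Django): subclasses obtain a validated pointer via the ptr property.
# class DummyGeom(GEOSBase):
#     def __init__(self, ptr):
#         self.ptr = ptr  # routed through _set_ptr, which type-checks
# g = DummyGeom(c_void_p(1))
# g.ptr  # returns the pointer; raises GEOSException while it is NULL/unset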
| bsd-3-clause |
rossgoodwin/musapaedia | musapaedia/muse/lib/python2.7/site-packages/setuptools/tests/test_dist_info.py | 148 | 2261 | """Test .dist-info style distributions.
"""
import os
import shutil
import tempfile
import pytest
import pkg_resources
from .textwrap import DALS
class TestDistInfo:
def test_distinfo(self):
dists = dict(
(d.project_name, d)
for d in pkg_resources.find_distributions(self.tmpdir)
)
assert len(dists) == 2, dists
unversioned = dists['UnversionedDistribution']
versioned = dists['VersionedDistribution']
assert versioned.version == '2.718' # from filename
assert unversioned.version == '0.3' # from METADATA
@pytest.mark.importorskip('ast')
def test_conditional_dependencies(self):
specs = 'splort==4', 'quux>=1.1'
requires = list(map(pkg_resources.Requirement.parse, specs))
for d in pkg_resources.find_distributions(self.tmpdir):
assert d.requires() == requires[:1]
assert d.requires(extras=('baz',)) == requires
assert d.extras == ['baz']
metadata_template = DALS("""
Metadata-Version: 1.2
Name: {name}
{version}
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
""")
def setup_method(self, method):
self.tmpdir = tempfile.mkdtemp()
dist_info_name = 'VersionedDistribution-2.718.dist-info'
versioned = os.path.join(self.tmpdir, dist_info_name)
os.mkdir(versioned)
with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file:
metadata = self.metadata_template.format(
name='VersionedDistribution',
version='',
).replace('\n\n', '\n')
metadata_file.write(metadata)
dist_info_name = 'UnversionedDistribution.dist-info'
unversioned = os.path.join(self.tmpdir, dist_info_name)
os.mkdir(unversioned)
with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file:
metadata = self.metadata_template.format(
name='UnversionedDistribution',
version='Version: 0.3',
)
metadata_file.write(metadata)
def teardown_method(self, method):
shutil.rmtree(self.tmpdir)
| mit |
victorzhao/miniblink49 | third_party/skia/tools/reformat-json.py | 208 | 1741 | #!/usr/bin/python
'''
Copyright 2013 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''
'''
Rewrites a JSON file to use Python's standard JSON pretty-print format,
so that subsequent runs of rebaseline.py will generate useful diffs
(only the actual checksum differences will show up as diffs, not obscured
by format differences).
Should not modify the JSON contents in any meaningful way.
'''
# System-level imports
import argparse
import os
import sys
# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
sys.path.append(GM_DIRECTORY)
import gm_json
def Reformat(filename):
print 'Reformatting file %s...' % filename
gm_json.WriteToFile(gm_json.LoadFromFile(filename), filename)
def _Main():
parser = argparse.ArgumentParser(description='Reformat JSON files in-place.')
parser.add_argument('filenames', metavar='FILENAME', nargs='+',
help='file to reformat')
args = parser.parse_args()
for filename in args.filenames:
Reformat(filename)
sys.exit(0)
if __name__ == '__main__':
_Main()
| gpl-3.0 |
cherez/youtube-dl | youtube_dl/extractor/screencast.py | 147 | 4200 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
)
class ScreencastIE(InfoExtractor):
_VALID_URL = r'https?://www\.screencast\.com/t/(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'http://www.screencast.com/t/3ZEjQXlT',
'md5': '917df1c13798a3e96211dd1561fded83',
'info_dict': {
'id': '3ZEjQXlT',
'ext': 'm4v',
'title': 'Color Measurement with Ocean Optics Spectrometers',
'description': 'md5:240369cde69d8bed61349a199c5fb153',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/V2uXehPJa1ZI',
'md5': 'e8e4b375a7660a9e7e35c33973410d34',
'info_dict': {
'id': 'V2uXehPJa1ZI',
'ext': 'mov',
'title': 'The Amadeus Spectrometer',
'description': 're:^In this video, our friends at.*To learn more about Amadeus, visit',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/aAB3iowa',
'md5': 'dedb2734ed00c9755761ccaee88527cd',
'info_dict': {
'id': 'aAB3iowa',
'ext': 'mp4',
'title': 'Google Earth Export',
'description': 'Provides a demo of a CommunityViz export to Google Earth, one of the 3D viewing options.',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/X3ddTrYh',
'md5': '669ee55ff9c51988b4ebc0877cc8b159',
'info_dict': {
'id': 'X3ddTrYh',
'ext': 'wmv',
'title': 'Toolkit 6 User Group Webinar (2014-03-04) - Default Judgment and First Impression',
'description': 'md5:7b9f393bc92af02326a5c5889639eab0',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(
r'<embed name="Video".*?src="([^"]+)"', webpage,
'QuickTime embed', default=None)
if video_url is None:
flash_vars_s = self._html_search_regex(
r'<param name="flashVars" value="([^"]+)"', webpage, 'flash vars',
default=None)
if not flash_vars_s:
flash_vars_s = self._html_search_regex(
r'<param name="initParams" value="([^"]+)"', webpage, 'flash vars',
default=None)
if flash_vars_s:
flash_vars_s = flash_vars_s.replace(',', '&')
if flash_vars_s:
flash_vars = compat_parse_qs(flash_vars_s)
video_url_raw = compat_urllib_request.quote(
flash_vars['content'][0])
video_url = video_url_raw.replace('http%3A', 'http:')
if video_url is None:
video_meta = self._html_search_meta(
'og:video', webpage, default=None)
if video_meta:
video_url = self._search_regex(
r'src=(.*?)(?:$|&)', video_meta,
'meta tag video URL', default=None)
if video_url is None:
raise ExtractorError('Cannot find video')
title = self._og_search_title(webpage, default=None)
if title is None:
title = self._html_search_regex(
[r'<b>Title:</b> ([^<]*)</div>',
r'class="tabSeperator">></span><span class="tabText">(.*?)<'],
webpage, 'title')
thumbnail = self._og_search_thumbnail(webpage)
description = self._og_search_description(webpage, default=None)
if description is None:
description = self._html_search_meta('description', webpage)
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
}
| unlicense |
jesramirez/odoo | addons/hr_timesheet_sheet/report/__init__.py | 342 | 1074 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
AmandaMoen/AmandaMoen | notes/resources/UW_IntroClass/class8.5/code/basic_app_4.py | 1 | 7168 | #!/usr/bin/env python
"""
Example of the very basic, minimal framework for a wxPython application
This version adds a single button
"""
import wx
import os
#--------------------------------------------------------------
# This is how you pre-establish a file filter so that the dialog
# only shows the extension(s) you want it to.
wildcard = "Python source (*.py)|*.py|" \
"Compiled Python (*.pyc)|*.pyc|" \
"SPAM files (*.spam)|*.spam|" \
"Egg file (*.egg)|*.egg|" \
"All files (*.*)|*.*"
#--------------------------------------------------------------
class AppLogic(object):
"""
A class to hold the application Application Logic.
You generally don't want the real logic of the app mixed
in with the GUI
In a real app, this would be a substantial collection of
modules, classes, etc...
"""
def file_open(self, filename="default_name"):
"""This method opens a file"""
print "Open a file: "
print "I'd be opening file: %s now"%filename
def file_close(self):
"""This method closes a file"""
print "Close a file: "
print "I'd be closing a file now"
class TestFrame(wx.Frame):
def __init__(self, app_logic, *args, **kwargs):
kwargs.setdefault('title', "Simple test App")
wx.Frame.__init__(self, *args, **kwargs)
self.app_logic = app_logic
# Build up the menu bar:
menuBar = wx.MenuBar()
fileMenu = wx.Menu()
saveasMenuItem = fileMenu.Append(wx.ID_ANY, "&Save As", "Create a new file")
self.Bind(wx.EVT_MENU, self.onSaveAs, saveasMenuItem )
openMenuItem = fileMenu.Append(wx.ID_ANY, "&Open", "Open an existing file" )
self.Bind(wx.EVT_MENU, self.onOpen, openMenuItem)
closeMenuItem = fileMenu.Append(wx.ID_ANY, "&Close", "Close a file" )
self.Bind(wx.EVT_MENU, self.onClose, closeMenuItem)
exitMenuItem = fileMenu.Append(wx.ID_EXIT, "Exit", "Exit the application")
self.Bind(wx.EVT_MENU, self.onExit, exitMenuItem)
menuBar.Append(fileMenu, "&File")
helpMenu = wx.Menu()
helpMenuItem = helpMenu.Append(wx.ID_HELP, "Help", "Get help")
menuBar.Append(helpMenu, "&Help")
self.SetMenuBar(menuBar)
## add just a single button:
self.theButton = wx.Button(self, label="Push Me")
self.theButton.Bind(wx.EVT_BUTTON, self.onButton)
self.theButton.Bind(wx.EVT_RIGHT_DOWN, self.onRight)
def onButton(self, evt=None):
print "You pushed the button!"
evt.Skip()
def onRight(self, evt=None):
print "right click!"
evt.Skip()
def onClose(self, evt=None):
print "close menu selected"
self.file_close()
def onExit(self, evt=None):
print "Exit the program here"
print "The event passed to onExit is type ", type(evt),
self.Close()
def onSaveAs ( self, evt=None ):
"""This method saves the file with a new name"""
# Create the dialog. In this case the current directory is forced as the starting
# directory for the dialog, and no default file name is forced. This can easily
# be changed in your program. This is a 'save' dialog.
#
# Unlike the 'open dialog' example found elsewhere, this example does NOT
# force the current working directory to change if the user chooses a different
# directory than the one initially set.
dlg = wx.FileDialog(self,
message="Save file as ...",
defaultDir=os.getcwd(),
defaultFile="",
wildcard=wildcard,
style=wx.SAVE )
# This sets the default filter that the user will initially see. Otherwise,
# the first filter in the list will be used by default.
dlg.SetFilterIndex(2)
# Show the dialog and retrieve the user response. If it is the OK response,
# process the data.
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
print "In onSaveAs, the path is %s" % path
# Normally, at this point you would save your data using the file and path
# data that the user provided to you, but since we didn't actually start
# with any data to work with, that would be difficult.
#
# The code to do so would be similar to this, assuming 'data' contains
# the data you want to save:
#
# fp = file(path, 'w') # Create file anew
# fp.write(data)
# fp.close()
#
# You might want to add some error checking :-)
else :
print "The file dialog was canceled before anything was selected"
# Note that the current working dir didn't change. This is good since
# that's the way we set it up.
# Destroy the dialog. Don't do this until you are done with it!
# BAD things can happen otherwise!
dlg.Destroy()
def onOpen(self, evt=None):
"""This method opens an existing file"""
print "Open a file: "
# Create the dialog. In this case the current directory is forced as the starting
# directory for the dialog, and no default file name is forced. This can easily
# be changed in your program. This is an 'open' dialog, and allows multiple
# file selections as well.
#
# Finally, if the directory is changed in the process of getting files, this
# dialog is set up to change the current working directory to the path chosen.
dlg = wx.FileDialog( self,
message="Choose a file",
defaultDir=os.getcwd(),
defaultFile="",
wildcard=wildcard,
style=wx.OPEN | wx.CHANGE_DIR
)
# Show the dialog and retrieve the user response. If it is the OK response,
# process the data.
if dlg.ShowModal() == wx.ID_OK:
# This returns the path of the file that was selected.
path = dlg.GetPath()
print "I'd be opening file in onOpen ", path
self.app_logic.file_open( path )
else :
print "The file dialog was canceled before anything was selected"
# Destroy the dialog. Don't do this until you are done with it!
# BAD things can happen otherwise!
dlg.Destroy()
def file_close(self):
"""This method closes a file"""
print "Close a file: "
print "I'd be closing a file now"
class TestApp(wx.App):
def OnInit(self):
"""
App initialization goes here -- not much to do, in this case
"""
app_logic = AppLogic()
f = TestFrame(app_logic, parent=None)
f.Show()
return True
if __name__ == "__main__":
app = TestApp(False)
app.MainLoop()
| gpl-2.0 |
jaggu303619/asylum-v2.0 | openerp/addons/resource/faces/pcalendar.py | 433 | 28436 | #@+leo-ver=4
#@+node:@file pcalendar.py
#@@language python
#@<< Copyright >>
#@+node:<< Copyright >>
############################################################################
# Copyright (C) 2005, 2006, 2007, 2008 by Reithinger GmbH
# [email protected]
#
# This file is part of faces.
#
# faces is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# faces is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
############################################################################
#@-node:<< Copyright >>
#@nl
"""
This module contains all classes and functions for the project plan calendar
"""
#@<< Imports >>
#@+node:<< Imports >>
from string import *
import datetime
import time
import re
import locale
import bisect
import sys
TIME_RANGE_PATTERN = re.compile("(\\d+):(\\d+)\\s*-\\s*(\\d+):(\\d+)")
TIME_DELTA_PATTERN = re.compile("([-+]?\\d+(\\.\\d+)?)([dwmyMH])")
DEFAULT_MINIMUM_TIME_UNIT = 15
DEFAULT_WORKING_DAYS_PER_WEEK = 5
DEFAULT_WORKING_DAYS_PER_MONTH = 20
DEFAULT_WORKING_DAYS_PER_YEAR = 200
DEFAULT_WORKING_HOURS_PER_DAY = 8
DEFAULT_WORKING_TIMES = ( (8 * 60, 12 * 60 ),
(13 * 60, 17 * 60 ) )
DEFAULT_WORKING_DAYS = { 0 : DEFAULT_WORKING_TIMES,
1 : DEFAULT_WORKING_TIMES,
2 : DEFAULT_WORKING_TIMES,
3 : DEFAULT_WORKING_TIMES,
4 : DEFAULT_WORKING_TIMES,
5 : (),
6 : () }
#@-node:<< Imports >>
#@nl
#@+others
#@+node:to_time_range
def to_time_range(src):
"""
converts a string to a time range, i.e.
(from, to)
from and to are ints specifying the minutes since midnight
"""
if not src: return ()
mo = TIME_RANGE_PATTERN.match(src)
if not mo:
raise ValueError("%s is no time range" % src)
from_time = int(mo.group(1)) * 60 + int(mo.group(2))
to_time = int(mo.group(3)) * 60 + int(mo.group(4))
return from_time, to_time
#@-node:to_time_range
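# Example (illustrative, not in the original source):
#   to_time_range("8:00-12:00") -> (480, 720), i.e. minutes since midnight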
#@+node:to_datetime
def to_datetime(src):
"""
a tolerant conversion function to convert different strings
to a datetime.dateime
"""
#to get the original value for wrappers
new = getattr(src, "_value", src)
while new is not src:
src = new
new = getattr(src, "_value", src)
if isinstance(src, _WorkingDateBase):
src = src.to_datetime()
if isinstance(src, datetime.datetime):
return src
src = str(src)
formats = [ "%x %H:%M",
"%x",
"%Y-%m-%d %H:%M",
"%y-%m-%d %H:%M",
"%d.%m.%Y %H:%M",
"%d.%m.%y %H:%M",
"%Y%m%d %H:%M",
"%d/%m/%y %H:%M",
"%d/%m/%Y %H:%M",
"%d/%m/%Y",
"%d/%m/%y",
"%Y-%m-%d",
"%y-%m-%d",
"%d.%m.%Y",
"%d.%m.%y",
"%Y%m%d" ]
for f in formats:
try:
conv = time.strptime(src, f)
return datetime.datetime(*conv[0:-3])
except Exception, e:
pass
raise TypeError("'%s' (%s) is not a datetime" % (src, str(type(src))))
#@-node:
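# Example (illustrative, not in the original source):
#   to_datetime("2005-01-10 08:00") -> datetime.datetime(2005, 1, 10, 8, 0)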
#@+node:_to_days
def _to_days(src):
"""
converts a string of the day abbreviations mon, tue, wed,
thu, fri, sat, sun to a dict with the correct weekday indices.
For example
_to_days('mon, tue, thu') results in
{ 0:1, 1:1, 3:1 }
"""
tokens = src.split(",")
result = { }
for t in tokens:
try:
index = { "mon" : 0,
"tue" : 1,
"wed" : 2,
"thu" : 3,
"fri" : 4,
"sat" : 5,
"sun" : 6 } [ lower(t.strip()) ]
result[index] = 1
except:
raise ValueError("%s is not a day" % (t))
return result
#@-node:_to_days
#@+node:_add_to_time_spans
def _add_to_time_spans(src, to_add, is_free):
if not isinstance(to_add, (tuple, list)):
to_add = (to_add,)
tmp = []
for start, end, f in src:
tmp.append((start, True, f))
tmp.append((end, False, f))
for v in to_add:
if isinstance(v, (tuple, list)):
start = to_datetime(v[0])
end = to_datetime(v[1])
else:
start = to_datetime(v)
end = start.replace(hour=0, minute=0) + datetime.timedelta(1)
tmp.append((start, start <= end, is_free))
tmp.append((end, start > end, is_free))
tmp.sort()
# 0: date
# 1: is_start
# 2: is_free
sequence = []
free_count = 0
work_count = 0
last = None
for date, is_start, is_free in tmp:
if is_start:
if is_free:
if not free_count and not work_count:
last = date
free_count += 1
else:
if not work_count:
if free_count: sequence.append((last, date, True))
last = date
work_count += 1
else:
if is_free:
assert(free_count > 0)
free_count -= 1
if not free_count and not work_count:
sequence.append((last, date, True))
else:
assert(work_count > 0)
work_count -= 1
if not work_count: sequence.append((last, date, False))
if free_count: last = date
return tuple(sequence)
#@-node:_add_to_time_spans
#@+node:to_timedelta
def to_timedelta(src, cal=None, is_duration=False):
"""
converts a string to a datetime.timedelta. If cal is specified
it will be used for getting the working times. If is_duration=True,
working times will not be considered. Valid units are
d for Days
w for Weeks
m for Months
y for Years
H for Hours
M for Minutes
"""
cal = cal or _default_calendar
if isinstance(src, datetime.timedelta):
return datetime.timedelta(src.days, seconds=src.seconds)  # timedelta accepts no calendar argument
if isinstance(src, (long, int, float)):
src = "%sM" % str(src)
if not isinstance(src, basestring):
raise ValueError("%s is not a duration" % (repr(src)))
src = src.strip()
if is_duration:
d_p_w = 7
d_p_m = 30
d_p_y = 360
d_w_h = 24
else:
d_p_w = cal.working_days_per_week
d_p_m = cal.working_days_per_month
d_p_y = cal.working_days_per_year
d_w_h = cal.working_hours_per_day
def convert_minutes(minutes):
minutes = int(minutes)
hours = minutes / 60
minutes = minutes % 60
days = hours / d_w_h
hours = hours % d_w_h
return [ days, 0, 0, 0, minutes, hours ]
def convert_days(value):
days = int(value)
value -= days
value *= d_w_h
hours = int(value)
value -= hours
value *= 60
minutes = round(value)
return [ days, 0, 0, 0, minutes, hours ]
sum_args = [ 0, 0, 0, 0, 0, 0 ]
split = src.split(" ")
for s in split:
mo = TIME_DELTA_PATTERN.match(s)
if not mo:
raise ValueError(src +
" is not a valid duration: valid"
" units are: d w m y M H")
unit = mo.group(3)
val = float(mo.group(1))
if unit == 'd':
args = convert_days(val)
elif unit == 'w':
args = convert_days(val * d_p_w)
elif unit == 'm':
args = convert_days(val * d_p_m)
elif unit == 'y':
args = convert_days(val * d_p_y)
elif unit == 'M':
args = convert_minutes(val)
elif unit == 'H':
args = convert_minutes(val * 60)
sum_args = [ a + b for a, b in zip(sum_args, args) ]
sum_args = tuple(sum_args)
return datetime.timedelta(*sum_args)
#@-node:to_timedelta
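# Examples (illustrative, assuming the default calendar with 8 working hours
# per day and 5 working days per week):
#   to_timedelta("1d 2H") -> datetime.timedelta(days=1, hours=2)
#   to_timedelta("1w")    -> datetime.timedelta(days=5)  # 5 working days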
#@+node:timedelta_to_str
def timedelta_to_str(delta, format, cal=None, is_duration=False):
cal = cal or _default_calendar
if is_duration:
d_p_w = 7
d_p_m = 30
d_p_y = 365
d_w_h = 24
else:
d_p_w = cal.working_days_per_week
d_p_m = cal.working_days_per_month
d_p_y = cal.working_days_per_year
d_w_h = cal.working_hours_per_day
has_years = format.find("%y") > -1
has_minutes = format.find("%M") > -1
has_hours = format.find("%H") > -1 or has_minutes
has_days = format.find("%d") > -1
has_weeks = format.find("%w") > -1
has_months = format.find("%m") > -1
result = format
days = delta.days
d_r = (days, format)
minutes = delta.seconds / 60
def rebase(d_r, cond1, cond2, letter, divisor):
#rebase the days
if not cond1: return d_r
days, result = d_r
if cond2:
val = days / divisor
if not val:
result = re.sub("{[^{]*?%" + letter + "[^}]*?}", "", result)
result = result.replace("%" + letter, str(val))
days %= divisor
else:
result = result.replace("%" + letter,
locale.format("%.2f",
(float(days) / divisor)))
return (days, result)
d_r = rebase(d_r, has_years, has_months or has_weeks or has_days, "y", d_p_y)
d_r = rebase(d_r, has_months, has_weeks or has_days, "m", d_p_m)
d_r = rebase(d_r, has_weeks, has_days, "w", d_p_w)
days, result = d_r
if not has_days:
minutes += days * d_w_h * 60
days = 0
if has_hours:
if not days:
result = re.sub("{[^{]*?%d[^}]*?}", "", result)
result = result.replace("%d", str(days))
else:
result = result.replace("%d",
"%.2f" % (days + float(minutes)
/ (d_w_h * 60)))
if has_hours:
if has_minutes:
val = minutes / 60
if not val:
result = re.sub("{[^{]*?%H[^}]*?}", "", result)
result = result.replace("%H", str(val))
minutes %= 60
else:
result = result.replace("%H", "%.2f" % (float(minutes) / 60))
if not minutes:
result = re.sub("{[^{]*?%M[^}]*?}", "", result)
result = result.replace("%M", str(minutes))
result = result.replace("{", "")
result = result.replace("}", "")
return result.strip()
#@-node:timedelta_to_str
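# Example (illustrative, default calendar, is_duration=False):
#   timedelta_to_str(datetime.timedelta(days=1, hours=2), "{%dd}{ %HH}{ %MM}")
#   returns "1d 2H"; sections in braces are dropped when their value is zero.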
#@+node:strftime
def strftime(dt, format):
"""
an extended version of strftime, that introduces some new
directives:
%IW iso week number
%IY iso year
%IB full month name appropriate to iso week
%ib abbreviated month name appropriate to iso week
%im month as decimal number appropriate to iso week
"""
iso = dt.isocalendar()
if iso[0] != dt.year:
iso_date = dt.replace(day=1, month=1)
format = format \
.replace("%IB", iso_date.strftime("%B"))\
.replace("%ib", iso_date.strftime("%b"))\
.replace("%im", iso_date.strftime("%m"))
else:
format = format \
.replace("%IB", "%B")\
.replace("%ib", "%b")\
.replace("%im", "%m")
format = format \
.replace("%IW", str(iso[1]))\
.replace("%IY", str(iso[0]))\
return dt.strftime(format)
#@-node:strftime
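# Example (illustrative): 1.1.2005 falls in ISO week 53 of 2004, so
#   strftime(datetime.datetime(2005, 1, 1), "%IY-W%IW") -> "2004-W53"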
#@+node:union
def union(*calendars):
"""
returns a calendar that unifies all working times
"""
#@ << check arguments >>
#@+node:<< check arguments >>
if len(calendars) == 1:
calendars = calendars[0]
#@nonl
#@-node:<< check arguments >>
#@nl
#@ << intersect vacations >>
#@+node:<< intersect vacations >>
free_time = []
for c in calendars:
for start, end, is_free in c.time_spans:
if is_free:
free_time.append((start, False))
free_time.append((end, True))
count = len(calendars)
open = 0
time_spans = []
free_time.sort()
for date, is_end in free_time:
if is_end:
if open == count:
time_spans.append((start, date, True))
open -= 1
else:
open += 1
start = date
#@-node:<< intersect vacations >>
#@nl
#@ << unify extra worktime >>
#@+node:<< unify extra worktime >>
for c in calendars:
for start, end, is_free in c.time_spans:
if not is_free:
time_spans = _add_to_time_spans(time_spans, start, end)
#@nonl
#@-node:<< unify extra worktime >>
#@nl
#@ << unify working times >>
#@+node:<< unify working times >>
working_times = {}
for d in range(0, 7):
times = []
for c in calendars:
for start, end in c.working_times.get(d, []):
times.append((start, False))
times.append((end, True))
times.sort()
open = 0
ti = []
start = None
for time, is_end in times:
if not is_end:
if not start: start = time
open += 1
else:
open -= 1
if not open:
ti.append((start, time))
start = None
if ti:
working_times[d] = ti
#@-node:<< unify working times >>
#@nl
#@ << create result calendar >>
#@+node:<< create result calendar >>
result = Calendar()
result.working_times = working_times
result.time_spans = time_spans
result._recalc_working_time()
result._build_mapping()
#@nonl
#@-node:<< create result calendar >>
#@nl
return result
#@nonl
#@-node:union
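# Example (illustrative sketch): time counts as working time in the union if
# it is working time in at least one of the calendars.
#   cal_a = Calendar(); cal_a.set_working_days("mon,tue,wed", "8:00-12:00")
#   cal_b = Calendar(); cal_b.set_working_days("wed,thu,fri", "13:00-17:00")
#   merged = cal_a | cal_b  # __or__ delegates to union()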
#@+node:class _CalendarItem
class _CalendarItem(int):
#@ << class _CalendarItem declarations >>
#@+node:<< class _CalendarItem declarations >>
__slots__ = ()
calendar = None
#@-node:<< class _CalendarItem declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, val):
try:
return int.__new__(cls, val)
except OverflowError:
return int.__new__(cls, sys.maxint)
#@-node:__new__
#@+node:round
def round(self, round_up=True):
m_t_u = self.calendar.minimum_time_unit
minutes = int(self)
base = (minutes / m_t_u) * m_t_u
minutes %= m_t_u
round_up = round_up and minutes > 0 or minutes > m_t_u / 2
if round_up: base += m_t_u
return self.__class__(base)
#@-node:round
#@-others
#@-node:class _CalendarItem
#@+node:class _Minutes
class _Minutes(_CalendarItem):
#@ << class _Minutes declarations >>
#@+node:<< class _Minutes declarations >>
__slots__ = ()
STR_FORMAT = "{%dd}{ %HH}{ %MM}"
#@-node:<< class _Minutes declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, src=0, is_duration=False):
"""
converts a timedelta into working minutes.
"""
if isinstance(src, cls) or type(src) is int:
return _CalendarItem.__new__(cls, src)
cal = cls.calendar
if not isinstance(src, datetime.timedelta):
src = to_timedelta(src, cal, is_duration)
d_w_h = is_duration and 24 or cal.working_hours_per_day
src = src.days * d_w_h * 60 + src.seconds / 60
return _CalendarItem.__new__(cls, src)
#@-node:__new__
#@+node:__cmp__
def __cmp__(self, other):
return cmp(int(self), int(self.__class__(other)))
#@-node:__cmp__
#@+node:__add__
def __add__(self, other):
try:
return self.__class__(int(self) + int(self.__class__(other)))
except:
return NotImplemented
#@-node:__add__
#@+node:__sub__
def __sub__(self, other):
try:
return self.__class__(int(self) - int(self.__class__(other)))
except:
return NotImplemented
#@-node:__sub__
#@+node:to_timedelta
def to_timedelta(self, is_duration=False):
d_w_h = is_duration and 24 or self.calendar.working_hours_per_day
minutes = int(self)
hours = minutes / 60
minutes = minutes % 60
days = hours / d_w_h
hours = hours % d_w_h
return datetime.timedelta(days, hours=hours, minutes=minutes)
#@nonl
#@-node:to_timedelta
#@+node:strftime
def strftime(self, format=None, is_duration=False):
td = self.to_timedelta(is_duration)
return timedelta_to_str(td, format or self.STR_FORMAT,
self.calendar, is_duration)
#@nonl
#@-node:strftime
#@-others
#@-node:class _Minutes
#@+node:class _WorkingDateBase
class _WorkingDateBase(_CalendarItem):
"""
A datetime which has only valid values within the
working times of a specific calendar
"""
#@ << class _WorkingDateBase declarations >>
#@+node:<< class _WorkingDateBase declarations >>
timetuple = True
STR_FORMAT = "%x %H:%M"
_minutes = _Minutes
__slots__ = ()
#@-node:<< class _WorkingDateBase declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, src):
#cls.__bases__[0] is the base of
#the calendar specific StartDate and EndDate
if isinstance(src, cls.__bases__[0]) or type(src) in (int, float):
return _CalendarItem.__new__(cls, src)
src = cls.calendar.from_datetime(to_datetime(src))
return _CalendarItem.__new__(cls, src)
#@-node:__new__
#@+node:__repr__
def __repr__(self):
return self.strftime()
#@-node:__repr__
#@+node:to_datetime
def to_datetime(self):
return self.to_starttime()
#@-node:to_datetime
#@+node:to_starttime
def to_starttime(self):
return self.calendar.to_starttime(self)
#@-node:to_starttime
#@+node:to_endtime
def to_endtime(self):
return self.calendar.to_endtime(self)
#@-node:to_endtime
#@+node:__cmp__
def __cmp__(self, other):
return cmp(int(self), int(self.__class__(other)))
#@-node:__cmp__
#@+node:__add__
def __add__(self, other):
try:
return self.__class__(int(self) + int(self._minutes(other)))
except ValueError, e:
raise e
except:
return NotImplemented
#@-node:__add__
#@+node:__sub__
def __sub__(self, other):
if isinstance(other, (datetime.timedelta, str, _Minutes)):
try:
other = self._minutes(other)
except:
pass
if isinstance(other, self._minutes):
return self.__class__(int(self) - int(other))
try:
return self._minutes(int(self) - int(self.__class__(other)))
except:
return NotImplemented
#@-node:__sub__
#@+node:strftime
def strftime(self, format=None):
return strftime(self.to_datetime(), format or self.STR_FORMAT)
#@-node:strftime
#@-others
#@-node:class _WorkingDateBase
#@+node:class Calendar
class Calendar(object):
"""
A calendar to specify working times and vacations.
The calendar's epoch starts at 1.1.1979
"""
#@ << declarations >>
#@+node:<< declarations >>
# january the first must be a monday
EPOCH = datetime.datetime(1979, 1, 1)
minimum_time_unit = DEFAULT_MINIMUM_TIME_UNIT
working_days_per_week = DEFAULT_WORKING_DAYS_PER_WEEK
working_days_per_month = DEFAULT_WORKING_DAYS_PER_MONTH
working_days_per_year = DEFAULT_WORKING_DAYS_PER_YEAR
working_hours_per_day = DEFAULT_WORKING_HOURS_PER_DAY
now = EPOCH
#@-node:<< declarations >>
#@nl
#@ @+others
#@+node:__init__
def __init__(self):
self.time_spans = ()
self._dt_num_can = ()
self._num_dt_can = ()
self.working_times = { }
self._recalc_working_time()
self._make_classes()
#@-node:__init__
#@+node:__or__
def __or__(self, other):
if isinstance(other, Calendar):
return union(self, other)
return NotImplemented
#@nonl
#@-node:__or__
#@+node:clone
def clone(self):
result = Calendar()
result.working_times = self.working_times.copy()
result.time_spans = self.time_spans
result._recalc_working_time()
result._build_mapping()
return result
#@nonl
#@-node:clone
#@+node:set_working_days
def set_working_days(self, day_range, trange, *further_tranges):
"""
Sets the working days of an calendar
day_range is a string of day abbreviations like 'mon, tue'
trange and further_tranges is a time range string like
'8:00-10:00'
"""
time_ranges = [ trange ] + list(further_tranges)
time_ranges = filter(bool, map(to_time_range, time_ranges))
days = _to_days(day_range)
for k in days.keys():
self.working_times[k] = time_ranges
self._recalc_working_time()
self._build_mapping()
#@-node:set_working_days
#@+node:set_vacation
def set_vacation(self, value):
"""
Sets vacation time.
value is either a datetime literal or
a sequence of items that can be
a datetime literals and or pair of datetime literals
"""
self.time_spans = _add_to_time_spans(self.time_spans, value, True)
self._build_mapping()
#@-node:set_vacation
#@+node:set_extra_work
def set_extra_work(self, value):
"""
Sets extra working time
value is either a datetime literal or
a sequence of items that can be
a datetime literals and or pair of datetime literals
"""
self.time_spans = _add_to_time_spans(self.time_spans, value, False)
self._build_mapping()
#@-node:set_extra_work
#@+node:from_datetime
def from_datetime(self, value):
assert(isinstance(value, datetime.datetime))
delta = value - self.EPOCH
days = delta.days
minutes = delta.seconds / 60
# calculate the weektime
weeks = days / 7
wtime = self.week_time * weeks
# calculate the daytime
days %= 7
dtime = sum(self.day_times[:days])
# calculate the minute time
slots = self.working_times.get(days, DEFAULT_WORKING_DAYS[days])
mtime = 0
for start, end in slots:
if minutes > end:
mtime += end - start
else:
if minutes > start:
mtime += minutes - start
break
result = wtime + dtime + mtime
# map exceptional timespans
dt_num_can = self._dt_num_can
pos = bisect.bisect(dt_num_can, (value,)) - 1
if pos >= 0:
start, end, nstart, nend, cend = dt_num_can[pos]
if value < end:
if nstart < nend:
delta = value - start
delta = delta.days * 24 * 60 + delta.seconds / 60
result = nstart + delta
else:
result = nstart
else:
result += (nend - cend) # == (result - cend) + nend
return result
#@-node:from_datetime
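# Example (illustrative, default working times 8:00-12:00 / 13:00-17:00):
# the epoch Monday 1.1.1979 09:00 maps to 60, i.e. one working hour since
# the calendar epoch.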
#@+node:split_time
def split_time(self, value):
#map exceptional timespans
num_dt_can = self._num_dt_can
pos = bisect.bisect(num_dt_can, (value, sys.maxint)) - 1
if pos >= 0:
nstart, nend, start, end, cend = num_dt_can[pos]
if value < nend:
value = start + datetime.timedelta(minutes=value - nstart)
delta = value - self.EPOCH
return delta.days / 7, delta.days % 7, delta.seconds / 60, -1
else:
value += (cend - nend) # (value - nend + cend)
#calculate the weeks since the epoch
weeks = value / self.week_time
value %= self.week_time
#calculate the remaining days
days = 0
for day_time in self.day_times:
if value < day_time: break
value -= day_time
days += 1
#calculate the remaining minutes
minutes = 0
slots = self.working_times.get(days, DEFAULT_WORKING_DAYS[days])
index = 0
for start, end in slots:
delta = end - start
if delta > value:
minutes = start + value
break
else:
value -= delta
index += 1
return weeks, days, minutes, index
#@-node:split_time
#@+node:to_starttime
def to_starttime(self, value):
weeks, days, minutes, index = self.split_time(value)
return self.EPOCH + datetime.timedelta(weeks=weeks,
days=days,
minutes=minutes)
#@-node:to_starttime
#@+node:to_endtime
def to_endtime(self, value):
return self.to_starttime(value - 1) + datetime.timedelta(minutes=1)
#@-node:to_endtime
#@+node:get_working_times
def get_working_times(self, day):
return self.working_times.get(day, DEFAULT_WORKING_DAYS[day])
#@-node:get_working_times
#@+node:_build_mapping
def _build_mapping(self):
self._dt_num_can = self._num_dt_can = ()
dt_num_can = []
num_dt_can = []
delta = self.Minutes()
for start, end, is_free in self.time_spans:
cstart = self.StartDate(start)
cend = self.EndDate(end)
nstart = cstart + delta
if not is_free:
d = end - start
d = d.days * 24 * 60 + d.seconds / 60
nend = nstart + d
else:
nend = nstart
delta += (nend - nstart) - (cend - cstart)
dt_num_can.append((start, end, nstart, nend, cend))
num_dt_can.append((nstart, nend, start, end, cend))
self._dt_num_can = tuple(dt_num_can)
self._num_dt_can = tuple(num_dt_can)
#@-node:_build_mapping
#@+node:_recalc_working_time
def _recalc_working_time(self):
def slot_sum_time(day):
slots = self.working_times.get(day, DEFAULT_WORKING_DAYS[day])
return sum(map(lambda slot: slot[1] - slot[0], slots))
self.day_times = map(slot_sum_time, range(0, 7))
self.week_time = sum(self.day_times)
#@-node:_recalc_working_time
#@+node:_make_classes
def _make_classes(self):
#ensure that the classes are instance specific
class minutes(_Minutes):
calendar = self
__slots__ = ()
class db(_WorkingDateBase):
calendar = self
_minutes = minutes
__slots__ = ()
class wdt(db): __slots__ = ()
class edt(db):
__slots__ = ()
def to_datetime(self):
return self.to_endtime()
self.Minutes, self.StartDate, self.EndDate = minutes, wdt, edt
self.WorkingDate = self.StartDate
#@-node:_make_classes
#@-others
_default_calendar = Calendar()
WorkingDate = _default_calendar.WorkingDate
StartDate = _default_calendar.StartDate
EndDate = _default_calendar.EndDate
Minutes = _default_calendar.Minutes
#@-node:class Calendar
#@-others
if __name__ == '__main__':
cal = Calendar()
start = EndDate("10.1.2005")
delay = Minutes("4H")
start2 = cal.StartDate(start)
start3 = cal.StartDate("10.1.2005")
#@-node:@file pcalendar.py
#@-leo
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
heihachi/PokemonGo-Bot | pokemongo_bot/cell_workers/pokemon_optimizer.py | 1 | 49784 | from __future__ import unicode_literals
# import datetime
import difflib
import itertools
import json
import math
import os
import time
import datetime
from pokemongo_bot import inventory
from pokemongo_bot.base_dir import _base_dir
from pokemongo_bot.base_task import BaseTask
from pokemongo_bot.human_behaviour import sleep, action_delay
from pokemongo_bot.item_list import Item
from pokemongo_bot.tree_config_builder import ConfigException
from pokemongo_bot.worker_result import WorkerResult
SUCCESS = 1
ERROR_XP_BOOST_ALREADY_ACTIVE = 3
LOG_TIME_INTERVAL = 120
class PokemonOptimizer(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
def __init__(self, bot, config):
super(PokemonOptimizer, self).__init__(bot, config)
def initialize(self):
self.max_pokemon_storage = inventory.get_pokemon_inventory_size()
self.last_pokemon_count = 0
self.pokemon_names = [p.name for p in inventory.pokemons().STATIC_DATA]
self.evolution_map = {}
self.debug = self.config.get('debug', False)
self.ongoing_stardust_count = 0
self.buddy = None
self.buddyid = 0
self.lock_buddy = True
self.no_log_until = 0
self.ignore_favorite = []
self.used_lucky_egg = None
pokemon_upgrade_cost_file = os.path.join(_base_dir, "data", "pokemon_upgrade_cost.json")
with open(pokemon_upgrade_cost_file, "r") as fd:
self.pokemon_upgrade_cost = json.load(fd)
if self.config.get("keep", None) is not None:
raise ConfigException("Pokemon Optimizer configuration has changed. See docs/pokemon_optimized.md or configs/config.json.optimizer.example")
if self.debug:
log_file_path = os.path.join(_base_dir, "data", "pokemon-optimizer-%s.log" % self.bot.config.username)
with open(log_file_path, "a") as _:
pass
self.log_file = open(log_file_path, "r+")
self.log_file.seek(0, 2)
self.config_bulktransfer_enabled = self.config.get("bulktransfer_enabled", False)
self.config_use_evolution_items = self.config.get("use_evolution_items", False)
self.config_max_bulktransfer = self.config.get("max_bulktransfer", 10)
self.config_min_slots_left = self.config.get("min_slots_left", 5)
self.config_action_wait_min = self.config.get("action_wait_min", 3)
self.config_action_wait_max = self.config.get("action_wait_max", 5)
self.config_transfer = self.config.get("transfer", False)
self.config_evolve = self.config.get("evolve", False)
self.config_evolve_to_final = self.config.get("evolve_to_final", True)
self.config_evolve_time = self.config.get("evolve_time", 25)
self.config_evolve_for_xp = self.config.get("evolve_for_xp", True)
self.config_transfer_after_xp_evolve = self.config.get("transfer_after_xp_evolve", True)
self.config_evolve_only_with_lucky_egg = self.config.get("evolve_only_with_lucky_egg", False)
self.config_evolve_count_for_lucky_egg = self.config.get("evolve_count_for_lucky_egg", 80)
self.config_may_use_lucky_egg = self.config.get("may_use_lucky_egg", False)
self.config_may_evolve_favorites = self.config.get("may_evolve_favorites", True)
self.config_may_upgrade_favorites = self.config.get("may_upgrade_favorites", True)
self.config_may_unfavor_pokemon = self.config.get("may_unfavor_pokemon", False)
self.config_upgrade = self.config.get("upgrade", False)
self.config_upgrade_level = self.config.get("upgrade_level", 30)
self.config_groups = self.config.get("groups", {"gym": ["Dragonite", "Snorlax", "Lapras", "Arcanine"]})
self.config_rules = self.config.get("rules", [{"mode": "overall", "top": 1, "sort": ["max_cp", "cp"], "keep": {"candy": -124}, "evolve": False, "buddy": True},
{"mode": "overall", "top": 1, "sort": ["-candy", "max_cp", "cp"], "evolve": False, "buddy": True},
{"mode": "by_family", "top": 3, "names": ["gym"], "sort": ["iv", "ncp"], "evolve": {"iv": 0.9, "ncp": 0.9}, "upgrade": {"iv": 0.9, "ncp": 0.9}},
{"mode": "by_family", "top": 1, "sort": ["iv"], "evolve": {"iv": 0.9}},
{"mode": "by_family", "top": 1, "sort": ["ncp"], "evolve": {"ncp": 0.9}},
{"mode": "by_family", "top": 1, "sort": ["cp"], "evolve": False},
{"mode": "by_pokemon", "names": ["!with_next_evolution"], "top": 1, "sort": ["dps_attack", "iv"], "keep": {"iv": 0.9}}])
if (not self.config_may_use_lucky_egg) and self.config_evolve_only_with_lucky_egg:
self.config_evolve = False
if self.config_evolve_for_xp is True:
self.config_evolve_for_xp = ["Caterpie", "Weedle", "Pidgey", "Rattata", "Nidoran F", "Nidoran M",
"Zubat", "Oddish", "Paras", "Venonat", "Psyduck", "Tentacool",
"Magnemite", "Krabby", "Voltorb", "Goldeen", "Staryu", "Eevee"]
elif self.config_evolve_for_xp is False:
self.config_evolve_for_xp = []
self.config_evolve_for_xp_whitelist, self.config_evolve_for_xp_blacklist = self.get_colorlist(self.config_evolve_for_xp)
self.config_groups["with_next_evolution"] = []
self.config_groups["with_previous_evolution"] = []
for pokemon in inventory.Pokemons.STATIC_DATA:
if pokemon.has_next_evolution:
self.config_groups["with_next_evolution"].append(pokemon.name)
if pokemon.prev_evolutions_all:
self.config_groups["with_previous_evolution"].append(pokemon.name)
def log(self, txt):
if self.log_file.tell() >= 1024 * 1024:
self.log_file.seek(0, 0)
self.log_file.write("[%s] %s\n" % (datetime.datetime.now().isoformat(str(" ")), txt))
self.log_file.flush()
def active_lucky_egg(self):
if self.used_lucky_egg is None:
return False
# A lucky egg lasts 30 minutes: it is still active if the last one was used less than 30 minutes ago
if self.used_lucky_egg > datetime.datetime.now()-datetime.timedelta(minutes=30):
return True
else:
return False
def get_pokemon_slot_left(self):
pokemon_count = inventory.Pokemons.get_space_used()
if pokemon_count != self.last_pokemon_count:
self.last_pokemon_count = pokemon_count
self.logger.info("Pokemon Bag: %s / %s", pokemon_count, self.max_pokemon_storage)
inventory.update_web_inventory()
return inventory.Pokemons.get_space_left()
def work(self):
if not self.enabled:
return WorkerResult.SUCCESS
# Repeat the optimizer twice, to get rid of the trash produced by evolving.
run_number = 0
for _ in itertools.repeat(None, 2):
run_number += 1
self.check_buddy()
self.open_inventory()
keep_all = []
try_evolve_all = []
try_upgrade_all = []
buddy_all = []
favor_all = []
for rule in self.config_rules:
mode = rule.get("mode", "by_family")
names = rule.get("names", [])
check_top = rule.get("top", "all")
check_keep = rule.get("keep", True)
whitelist, blacklist = self.get_colorlist(names)
if check_top == "all" and names == [] and check_keep:
self.logger.info("WARNING!! Will not transfer any Pokemon!!")
self.logger.info(rule)
self.logger.info("This rule is set to keep (`keep` is true) all Pokemon (no `top` and no `names` set!!)")
self.logger.info("Are you sure you want this?")
if mode == "by_pokemon":
for pokemon_id, pokemon_list in self.group_by_pokemon_id(inventory.pokemons().all()):
name = inventory.pokemons().name_for(pokemon_id)
if name in blacklist:
continue
if whitelist and (name not in whitelist):
continue
sorted_list = self.score_and_sort(pokemon_list, rule)
if len(sorted_list) == 0:
continue
keep, try_evolve, try_upgrade, buddy, favor = self.get_best_pokemon_for_rule(sorted_list, rule)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
elif mode == "by_family":
for family_id, pokemon_list in self.group_by_family_id(inventory.pokemons().all()):
matching_names = self.get_family_names(family_id)
if any(n in blacklist for n in matching_names):
continue
if whitelist and not any(n in whitelist for n in matching_names):
continue
sorted_list = self.score_and_sort(pokemon_list, rule)
if len(sorted_list) == 0:
continue
if family_id == 133: # "Eevee"
keep, try_evolve, try_upgrade, buddy, favor = self.get_multi_best_pokemon_for_rule(sorted_list, rule, 3)
else:
keep, try_evolve, try_upgrade, buddy, favor = self.get_best_pokemon_for_rule(sorted_list, rule)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
elif mode == "overall":
pokemon_list = []
for pokemon in inventory.pokemons().all():
name = pokemon.name
if name in blacklist:
continue
if whitelist and (name not in whitelist):
continue
pokemon_list.append(pokemon)
sorted_list = self.score_and_sort(pokemon_list, rule)
if len(sorted_list) == 0:
continue
keep, try_evolve, try_upgrade, buddy, favor = self.get_best_pokemon_for_rule(sorted_list, rule)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
keep_all = self.unique_pokemon_list(keep_all)
try_evolve_all = self.unique_pokemon_list(try_evolve_all)
try_upgrade_all = self.unique_pokemon_list(try_upgrade_all)
buddy_all = self.unique_pokemon_list(buddy_all)
try_favor_all = self.unique_pokemon_list(favor_all)
# Favorites have nothing to do with evolving and can be handled even
# when the bag is not full, just like the buddy
if self.config_may_unfavor_pokemon:
unfavor = []
for pokemon in inventory.pokemons().all():
if not pokemon in try_favor_all and pokemon.is_favorite:
unfavor.append(pokemon)
if len(unfavor) > 0:
self.logger.info("Marking %s Pokemon as no longer favorite", len(unfavor))
for pokemon in unfavor:
self.unfavor_pokemon(pokemon)
# Dont favor Pokemon if already a favorite
try_favor_all = [p for p in try_favor_all if not p.is_favorite]
try_favor_all = [p for p in try_favor_all if p.unique_id not in self.ignore_favorite]
if len(try_favor_all) > 0:
self.logger.info("Marking %s Pokemon as favorite", len(try_favor_all))
for pokemon in try_favor_all:
if pokemon.is_favorite is False:
self.favor_pokemon(pokemon)
if (not self.lock_buddy) and (len(buddy_all) > 0):
new_buddy = buddy_all[0]
if (not self.buddy) or (self.buddy["id"] != new_buddy.unique_id):
self.set_buddy_pokemon(new_buddy)
# Only check the bag on the first run; the second run continues even if the bag has enough space left
if run_number == 1 and self.get_pokemon_slot_left() > self.config_min_slots_left:
return WorkerResult.SUCCESS
transfer_all = []
evolve_all = []
upgrade_all = []
xp_all = []
for family_id, pokemon_list in self.group_by_family_id(inventory.pokemons().all()):
keep = [p for p in keep_all if self.get_family_id(p) == family_id]
try_evolve = [p for p in try_evolve_all if self.get_family_id(p) == family_id]
try_upgrade = [p for p in try_upgrade_all if self.get_family_id(p) == family_id]
transfer, evolve, upgrade, xp = self.get_evolution_plan(family_id, pokemon_list, keep, try_evolve, try_upgrade)
transfer_all += transfer
evolve_all += evolve
upgrade_all += upgrade
xp_all += xp
if not self.config_may_evolve_favorites:
self.logger.info("Removing favorites from evolve list.")
evolve_all = [p for p in evolve_all if not p.is_favorite]
if not self.config_may_upgrade_favorites:
self.logger.info("Removing favorites from upgrade list.")
upgrade_all = [p for p in upgrade_all if not p.is_favorite]
self.apply_optimization(transfer_all, evolve_all, upgrade_all, xp_all)
return WorkerResult.SUCCESS
def check_buddy(self):
self.buddy = self.bot.player_data.get("buddy_pokemon", {})
self.buddyid = self._get_buddyid()
if not self.buddy:
self.lock_buddy = False
return
pokemon = next((p for p in inventory.pokemons().all() if p.unique_id == self.buddy["id"]), None)
if not pokemon:
return
km_walked = inventory.player().player_stats.get("km_walked", 0)
last_km_awarded = self.buddy.setdefault("last_km_awarded", km_walked)
distance_walked = km_walked - last_km_awarded
distance_needed = pokemon.buddy_distance_needed
if distance_walked >= distance_needed:
self.get_buddy_walked(pokemon)
# self.buddy["start_km_walked"] can be empty here
if 'start_km_walked' not in self.buddy:
self.buddy["start_km_walked"] = 0
self.buddy["last_km_awarded"] = self.buddy["start_km_walked"] + distance_needed * int(distance_walked / distance_needed)
self.lock_buddy = False
else:
now = time.time()
if self.no_log_until < now:
self.no_log_until = now + LOG_TIME_INTERVAL
self.emit_event("buddy_walked",
formatted="Buddy {pokemon} walking: {distance_walked:.2f} / {distance_needed:.2f} km",
data={"pokemon": pokemon.name,
"distance_walked": distance_walked,
"distance_needed": distance_needed})
def open_inventory(self):
for pokemon in inventory.pokemons().all():
setattr(pokemon, "ncp", pokemon.cp_percent)
setattr(pokemon, "max_cp", pokemon.static.max_cp)
setattr(pokemon, "dps", pokemon.moveset.dps)
setattr(pokemon, "dps1", pokemon.fast_attack.dps)
setattr(pokemon, "dps2", pokemon.charged_attack.dps)
setattr(pokemon, "dps_attack", pokemon.moveset.dps_attack)
setattr(pokemon, "dps_defense", pokemon.moveset.dps_defense)
setattr(pokemon, "attack_perfection", pokemon.moveset.attack_perfection)
setattr(pokemon, "defense_perfection", pokemon.moveset.defense_perfection)
setattr(pokemon, "candy", pokemon.candy_quantity)
candy_to_evolution = max(pokemon.evolution_cost - pokemon.candy_quantity, 0)
setattr(pokemon, "candy_to_evolution", candy_to_evolution)
self.ongoing_stardust_count = self.bot.stardust
def get_colorlist(self, names):
whitelist = []
blacklist = []
for name in names:
if not name:
continue
if name[0] not in ['!', '-']:
group = self.config_groups.get(name, [])
if not group:
name = self.get_closest_name(name)
if name:
whitelist.append(name)
whitelist_sub, blacklist_sub = self.get_colorlist(group)
whitelist += whitelist_sub
blacklist += blacklist_sub
else:
name = name[1:]
group = self.config_groups.get(name, [])
if not group:
name = self.get_closest_name(name)
if name:
blacklist.append(name)
blacklist_sub, whitelist_sub = self.get_colorlist(group)
blacklist += blacklist_sub
whitelist += whitelist_sub
return (whitelist, blacklist)
def get_family_names(self, family_id):
ids = [family_id]
ids += inventory.pokemons().data_for(family_id).next_evolutions_all[:]
return [inventory.pokemons().name_for(x) for x in ids]
def get_closest_name(self, name):
mapping = {ord(x): ord(y) for x, y in zip("\u2641\u2642.-", "fm ")}
clean_names = {n.lower().translate(mapping): n for n in self.pokemon_names}
closest_names = difflib.get_close_matches(name.lower().translate(mapping), clean_names.keys(), 1)
if closest_names:
closest_name = clean_names[closest_names[0]]
if name != closest_name:
self.logger.warning("Unknown Pokemon name [%s]. Assuming it is [%s]", name, closest_name)
return closest_name
else:
raise ConfigException("Unknown Pokemon name [%s]" % name)
def group_by_pokemon_id(self, pokemon_list):
sorted_list = sorted(pokemon_list, key=self.get_pokemon_id)
return itertools.groupby(sorted_list, self.get_pokemon_id)
def group_by_family_id(self, pokemon_list):
sorted_list = sorted(pokemon_list, key=self.get_family_id)
return itertools.groupby(sorted_list, self.get_family_id)
def get_pokemon_id(self, pokemon):
return pokemon.pokemon_id
def get_family_id(self, pokemon):
return pokemon.first_evolution_id
def score_and_sort(self, pokemon_list, rule):
pokemon_list = list(pokemon_list)
if self.debug:
self.log("Pokemon %s" % pokemon_list)
self.log("Rule %s" % rule)
for pokemon in pokemon_list:
setattr(pokemon, "__score__", self.get_score(pokemon, rule))
keep = [p for p in pokemon_list if p.__score__[1] is True]
keep.sort(key=lambda p: p.__score__[0], reverse=True)
return keep
def get_score(self, pokemon, rule):
score = []
for a in rule.get("sort", []):
if a[0] == "-":
value = -getattr(pokemon, a[1:], 0)
else:
value = getattr(pokemon, a, 0)
score.append(value)
rule_keep = rule.get("keep", True)
rule_evolve = rule.get("evolve", True)
rule_upgrade = rule.get("upgrade", False)
rule_buddy = rule.get("buddy", False)
rule_favor = rule.get("favorite", False)
keep = rule_keep not in [False, {}]
keep &= self.satisfy_requirements(pokemon, rule_keep)
may_try_evolve = (hasattr(pokemon, "has_next_evolution") and pokemon.has_next_evolution())
may_try_evolve &= rule_evolve not in [False, {}]
may_try_evolve &= self.satisfy_requirements(pokemon, rule_evolve)
may_try_upgrade = rule_upgrade not in [False, {}]
may_try_upgrade &= self.satisfy_requirements(pokemon, rule_upgrade)
may_buddy = rule_buddy not in [False, {}]
may_buddy &= pokemon.in_fort is False
may_buddy &= self.satisfy_requirements(pokemon, rule_buddy)
may_favor = rule_favor not in [False, {}]
may_favor &= self.satisfy_requirements(pokemon, rule_favor)
if self.debug:
self.log("P:%s S:%s K:%s E:%s U:%s B:%s F:%s" % (pokemon, tuple(score), keep, may_try_evolve, may_try_upgrade, may_buddy, may_favor))
return tuple(score), keep, may_try_evolve, may_try_upgrade, may_buddy, may_favor
def satisfy_requirements(self, pokemon, req):
if type(req) is bool:
return req
satisfy = True
for a, v in req.items():
value = getattr(pokemon, a, 0)
if (type(v) is str) or (type(v) is unicode):
v = float(v)
if type(v) is list:
if type(v[0]) is list:
satisfy_range = False
for r in v:
satisfy_range |= (value >= r[0]) and (value <= r[1])
satisfy &= satisfy_range
else:
satisfy &= (value >= v[0]) and (value <= v[1])
elif v < 0:
satisfy &= (value <= abs(v))
else:
satisfy &= (value >= v)
return satisfy
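# Requirement semantics, illustrated (example values, not from the original):
#   {"iv": 0.9}          -> satisfied if pokemon.iv >= 0.9
#   {"candy": -124}      -> a negative bound means pokemon.candy <= 124
#   {"cp": [1000, 2000]} -> satisfied if 1000 <= pokemon.cp <= 2000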
def get_best_pokemon_for_rule(self, pokemon_list, rule):
pokemon_list = list(pokemon_list)
if len(pokemon_list) == 0:
return ([], [], [], [], [])  # keep, try_evolve, try_upgrade, buddy, favor
top = max(rule.get("top", 0), 0)
index = int(math.ceil(top)) - 1
if 0 < top < 1:
# a bare object() cannot hold attributes, so use a throwaway class instance
class _Worst(object):
pass
worst = _Worst()
for a in rule.get("sort", []):
best_attribute = getattr(pokemon_list[0], a)
setattr(worst, a, best_attribute * (1 - top))
setattr(worst, "__score__", self.get_score(worst, rule))
elif 0 <= index < len(pokemon_list):
worst = pokemon_list[index]
else:
worst = pokemon_list[-1]
return self.get_better_pokemon(pokemon_list, worst)
def get_multi_best_pokemon_for_rule(self, family_list, rule, nb_branch):
family_list = list(family_list)
if len(family_list) == 0:
return ([], [], [], [])
        # Handle each group of senior Pokemon independently
senior_pokemon_list = [p for p in family_list if not p.has_next_evolution()]
other_family_list = [p for p in family_list if p.has_next_evolution()]
senior_pids = set(p.pokemon_id for p in senior_pokemon_list)
keep_all = []
try_evolve_all = []
try_upgrade_all = []
buddy_all = []
favor_all = []
if not self.config_evolve:
            # Player handles evolution manually = fall back to per-Pokemon behavior
for _, pokemon_list in self.group_by_pokemon_id(family_list):
keep, try_evolve, try_upgrade, buddy, favor = self.get_best_pokemon_for_rule(pokemon_list, rule)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
else:
for _, pokemon_list in self.group_by_pokemon_id(senior_pokemon_list):
keep, try_evolve, try_upgrade, buddy, favor = self.get_best_pokemon_for_rule(pokemon_list, rule)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
if len(other_family_list) > 0:
if len(senior_pids) < nb_branch:
# We did not get every combination yet = All other Pokemon are potentially good to keep
worst = other_family_list[-1]
else:
best = keep_all + try_evolve_all + try_upgrade_all
best.sort(key=lambda p: p.__score__[0], reverse=True)
worst = best[-1]
keep, try_evolve, try_upgrade, buddy, favor = self.get_better_pokemon(other_family_list, worst, 12)
keep_all += keep
try_evolve_all += try_evolve
try_upgrade_all += try_upgrade
buddy_all += buddy
favor_all += favor
return keep_all, try_evolve_all, try_upgrade_all, buddy_all, favor_all
def get_better_pokemon(self, pokemon_list, worst, limit=1000):
keep = [p for p in pokemon_list if p.__score__[0] >= worst.__score__[0]][:limit]
try_evolve = [p for p in keep if p.__score__[2] is True]
try_upgrade = [p for p in keep if (p.__score__[2] is False) and (p.__score__[3] is True)]
buddy = [p for p in keep if p.__score__[4] is True]
favor = [p for p in keep if p.__score__[5] is True]
return keep, try_evolve, try_upgrade, buddy, favor
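    # The __score__ tuple indexed above follows get_score's return order:
    #   [0] sort score, [1] keep, [2] may_try_evolve, [3] may_try_upgrade,
    #   [4] may_buddy, [5] may_favor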
def get_evolution_plan(self, family_id, family_list, keep, try_evolve, try_upgrade):
candies = inventory.candies().get(family_id).quantity
family_name = inventory.Pokemons().name_for(family_id)
# All the rest is crap, for now
crap = list(family_list)
crap = [p for p in crap if p not in keep]
crap = [p for p in crap if not p.in_fort and not p.is_favorite and not (p.unique_id == self.buddyid)]
crap.sort(key=lambda p: (p.iv, p.cp), reverse=True)
# We will gain a candy whether we choose to transfer or evolve these Pokemon
candies += len(crap)
evolve = []
for pokemon in try_evolve:
pokemon_id = pokemon.pokemon_id
needed_evolution_item = inventory.pokemons().evolution_item_for(pokemon_id)
if needed_evolution_item is not None:
if self.config_use_evolution_items:
# We need a special Item to evolve this Pokemon!
item = inventory.items().get(needed_evolution_item)
needed = inventory.pokemons().evolution_items_needed_for(pokemon_id)
if item.count < needed:
self.logger.info("To evolve a {} we need {} of {}. We have {}".format(pokemon.name, needed, item.name, item.count))
continue
else:
# pass for this Pokemon
continue
if self.config_evolve_to_final:
pokemon_id = pokemon.pokemon_id
while inventory.pokemons().has_next_evolution(pokemon_id):
candies -= inventory.pokemons().evolution_cost_for(pokemon_id)
pokemon_id = inventory.pokemons().next_evolution_ids_for(pokemon_id)[0]
else:
candies -= pokemon.evolution_cost
if candies < 0:
continue
if self.config_evolve_to_final:
pokemon_id = pokemon.pokemon_id
while inventory.pokemons().has_next_evolution(pokemon_id):
candies += 1
evolve.append(pokemon)
pokemon_id = inventory.pokemons().next_evolution_ids_for(pokemon_id)[0]
else:
candies += 1
evolve.append(pokemon)
upgrade = []
upgrade_level = min(self.config_upgrade_level, inventory.player().level + 1.5, 40)
# Highest CP on top.
if len(try_upgrade) > 0:
try_upgrade.sort(key=lambda p: (p.cp), reverse=True)
for pokemon in try_upgrade:
# self.log("Considering %s for upgrade" % pokemon.name)
if pokemon.level >= upgrade_level:
# self.log("Pokemon already at target level. %s" % pokemon.level)
continue
full_upgrade_candy_cost = 0
full_upgrade_stardust_cost = 0
for i in range(int(pokemon.level * 2), int(upgrade_level * 2)):
upgrade_cost = self.pokemon_upgrade_cost[i - 2]
full_upgrade_candy_cost += upgrade_cost[0]
full_upgrade_stardust_cost += upgrade_cost[1]
candies -= full_upgrade_candy_cost
self.ongoing_stardust_count -= full_upgrade_stardust_cost
if (candies < 0) or (self.ongoing_stardust_count < 0):
# self.log("Not enough candy: %s" % candies)
# self.log("or stardust %s" % self.ongoing_stardust_count)
                # We didn't use the stardust, so refund it...
self.ongoing_stardust_count += full_upgrade_stardust_cost
continue
# self.log("Pokemon can be upgraded!!")
upgrade.append(pokemon)
if (not self.config_evolve_for_xp) or (family_name in self.config_evolve_for_xp_blacklist):
xp = []
transfer = crap
elif self.config_evolve_for_xp_whitelist and (family_name not in self.config_evolve_for_xp_whitelist):
xp = []
transfer = crap
else:
            # Compute how many of the crap Pokemon we should keep if we want to batch evolve them for xp
lowest_evolution_cost = inventory.pokemons().evolution_cost_for(family_id)
# transfer + keep_for_xp = len(crap)
# leftover_candies = candies - len(crap) + transfer * 1
# keep_for_xp = (leftover_candies - 1) / (lowest_evolution_cost - 1)
# keep_for_xp = (candies - len(crap) + transfer - 1) / (lowest_evolution_cost - 1)
# keep_for_xp = (candies - keep_for_xp - 1) / (lowest_evolution_cost - 1)
if (candies > 0) and lowest_evolution_cost:
keep_for_xp = int((candies - 1) / lowest_evolution_cost)
else:
keep_for_xp = 0
xp = [p for p in crap if p.has_next_evolution() and p.evolution_cost == lowest_evolution_cost][:keep_for_xp]
transfer = [p for p in crap if p not in xp]
return (transfer, evolve, upgrade, xp)
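    # Worked example of the keep_for_xp formula above (numbers illustrative):
    # with candies = 25 and lowest_evolution_cost = 12,
    # keep_for_xp = int((25 - 1) / 12) = 2, so two of the cheapest evolvers
    # are held back for batch xp evolution and the rest are transferred.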
def unique_pokemon_list(self, pokemon_list):
seen = set()
return [p for p in pokemon_list if not (p.unique_id in seen or seen.add(p.unique_id))]
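    # The filter above depends on set.add returning None: each unique_id is
    # recorded on first sight and later duplicates are dropped, e.g. (sketch):
    #   seen = set()
    #   [x for x in [1, 1, 2] if not (x in seen or seen.add(x))]  # -> [1, 2]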
def apply_optimization(self, transfer, evolve, upgrade, xp):
transfer_count = len(transfer)
evolve_count = len(evolve)
upgrade_count = len(upgrade)
xp_count = len(xp)
if self.config_transfer or self.bot.config.test:
if transfer_count > 0:
self.logger.info("Transferring %s Pokemon", transfer_count)
self.transfer_pokemon(transfer)
if self.config_upgrade or self.bot.config.test:
if upgrade_count > 0:
self.logger.info("Upgrading %s Pokemon [%s stardust]", upgrade_count, self.bot.stardust)
for pokemon in upgrade:
self.upgrade_pokemon(pokemon)
if self.config_evolve or self.bot.config.test:
evolve_xp_count = evolve_count + xp_count
if evolve_xp_count > 0:
skip_evolve = False
if self.config_evolve and self.config_may_use_lucky_egg and (not self.bot.config.test):
lucky_egg = inventory.items().get(Item.ITEM_LUCKY_EGG.value) # @UndefinedVariable
if lucky_egg.count == 0:
if self.config_evolve_only_with_lucky_egg:
skip_evolve = True
self.emit_event("skip_evolve",
formatted="Skipping evolution step. No lucky egg available")
elif evolve_xp_count < self.config_evolve_count_for_lucky_egg:
if self.config_evolve_only_with_lucky_egg:
skip_evolve = True
self.emit_event("skip_evolve",
formatted="Skipping evolution step. Not enough Pokemon to evolve with lucky egg: %s/%s" % (evolve_xp_count, self.config_evolve_count_for_lucky_egg))
elif self.get_pokemon_slot_left() > self.config_min_slots_left:
skip_evolve = True
self.emit_event("skip_evolve",
formatted="Waiting for more Pokemon to evolve with lucky egg: %s/%s" % (evolve_xp_count, self.config_evolve_count_for_lucky_egg))
else:
self.use_lucky_egg()
if not skip_evolve:
self.evolution_map = {}
if evolve_count > 0:
self.logger.info("Evolving %s Pokemon (the best)", evolve_count)
for pokemon in evolve:
self.evolve_pokemon(pokemon)
if xp_count > 0:
self.logger.info("Evolving %s Pokemon (for xp)", xp_count)
for pokemon in xp:
self.evolve_pokemon(pokemon, self.config_transfer_after_xp_evolve)
def transfer_pokemon(self, pokemons, skip_delay=False):
error_codes = {
0: 'UNSET',
1: 'SUCCESS',
2: 'POKEMON_DEPLOYED',
3: 'FAILED',
4: 'ERROR_POKEMON_IS_EGG',
5: 'ERROR_POKEMON_IS_BUDDY'
}
if self.config_bulktransfer_enabled and len(pokemons) > 1:
while len(pokemons) > 0:
action_delay(self.config_action_wait_min, self.config_action_wait_max)
pokemon_ids = []
count = 0
transfered = []
while len(pokemons) > 0 and count < self.config_max_bulktransfer:
pokemon = pokemons.pop()
transfered.append(pokemon)
pokemon_ids.append(pokemon.unique_id)
count = count + 1
try:
if self.config_transfer:
response_dict = self.bot.api.release_pokemon(pokemon_ids=pokemon_ids)
result = response_dict['responses']['RELEASE_POKEMON']['result']
if result != 1:
                            self.logger.error(u'Error while transferring pokemon: {}'.format(error_codes[result]))
return False
except Exception:
return False
for pokemon in transfered:
candy = inventory.candies().get(pokemon.pokemon_id)
if self.config_transfer and (not self.bot.config.test):
candy.add(1)
self.emit_event("pokemon_release",
formatted="Exchanged {pokemon} [IV {iv}] [CP {cp}] [{candy} candies]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp,
"candy": candy.quantity})
if self.config_transfer:
inventory.pokemons().remove(pokemon.unique_id)
with self.bot.database as db:
cursor = db.cursor()
cursor.execute("SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='transfer_log'")
db_result = cursor.fetchone()
if db_result[0] == 1:
db.execute("INSERT INTO transfer_log (pokemon, iv, cp) VALUES (?, ?, ?)", (pokemon.name, pokemon.iv, pokemon.cp))
else:
for pokemon in pokemons:
if self.config_transfer and (not self.bot.config.test):
response_dict = self.bot.api.release_pokemon(pokemon_id=pokemon.unique_id)
else:
response_dict = {"responses": {"RELEASE_POKEMON": {"candy_awarded": 0}}}
if not response_dict:
return False
candy_awarded = response_dict.get("responses", {}).get("RELEASE_POKEMON", {}).get("candy_awarded", 0)
candy = inventory.candies().get(pokemon.pokemon_id)
if self.config_transfer and (not self.bot.config.test):
candy.add(candy_awarded)
self.emit_event("pokemon_release",
formatted="Exchanged {pokemon} [IV {iv}] [CP {cp}] [{candy} candies]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp,
"candy": candy.quantity})
if self.config_transfer and (not self.bot.config.test):
inventory.pokemons().remove(pokemon.unique_id)
with self.bot.database as db:
cursor = db.cursor()
cursor.execute("SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='transfer_log'")
db_result = cursor.fetchone()
if db_result[0] == 1:
db.execute("INSERT INTO transfer_log (pokemon, iv, cp) VALUES (?, ?, ?)", (pokemon.name, pokemon.iv, pokemon.cp))
if not skip_delay:
action_delay(self.config_action_wait_min, self.config_action_wait_max)
return True
def use_lucky_egg(self):
lucky_egg = inventory.items().get(Item.ITEM_LUCKY_EGG.value) # @UndefinedVariable
if lucky_egg.count == 0:
return False
response_dict = self.bot.use_lucky_egg()
if not response_dict:
self.emit_event("lucky_egg_error",
level='error',
formatted="Failed to use lucky egg!")
return False
result = response_dict.get("responses", {}).get("USE_ITEM_XP_BOOST", {}).get("result", 0)
if result == SUCCESS:
lucky_egg.remove(1)
self.emit_event("used_lucky_egg",
formatted="Used lucky egg ({amount_left} left).",
data={"amount_left": lucky_egg.count})
self.used_lucky_egg = datetime.datetime.now()
return True
elif result == ERROR_XP_BOOST_ALREADY_ACTIVE:
self.emit_event("used_lucky_egg",
formatted="Lucky egg already active ({amount_left} left).",
data={"amount_left": lucky_egg.count})
return True
else:
self.emit_event("lucky_egg_error",
level='error',
formatted="Failed to use lucky egg!")
return False
def evolve_pokemon(self, pokemon, transfer=False):
while pokemon.unique_id in self.evolution_map:
pokemon = self.evolution_map[pokemon.unique_id]
if self.config_evolve and (not self.bot.config.test):
needed_evolution_item = inventory.pokemons().evolution_item_for(pokemon.pokemon_id)
if needed_evolution_item is not None:
if self.config_use_evolution_items:
                    # A special evolution item is required; pass it as evolution_item_requirement
response_dict = self.bot.api.evolve_pokemon(pokemon_id=pokemon.unique_id, evolution_item_requirement=needed_evolution_item)
else:
return False
else:
response_dict = self.bot.api.evolve_pokemon(pokemon_id=pokemon.unique_id)
else:
response_dict = {"responses": {"EVOLVE_POKEMON": {"result": SUCCESS}}}
if not response_dict:
return False
result = response_dict.get("responses", {}).get("EVOLVE_POKEMON", {}).get("result", 0)
if result != SUCCESS:
self.logger.info("Can't evolve %s" % pokemon.name)
self.logger.info(response_dict)
self.logger.info(result)
return False
xp = response_dict.get("responses", {}).get("EVOLVE_POKEMON", {}).get("experience_awarded", 0)
candy_awarded = response_dict.get("responses", {}).get("EVOLVE_POKEMON", {}).get("candy_awarded", 0)
candy = inventory.candies().get(pokemon.pokemon_id)
evolution = response_dict.get("responses", {}).get("EVOLVE_POKEMON", {}).get("evolved_pokemon_data", {})
if self.config_evolve and (not self.bot.config.test):
candy.consume(pokemon.evolution_cost - candy_awarded)
inventory.player().exp += xp
new_pokemon = inventory.Pokemon(evolution)
self.emit_event("pokemon_evolved",
formatted="Evolved {pokemon} [CP {old_cp}] into {new} [IV {iv}] [CP {cp}] [{candy} candies] [+{xp} xp]",
data={"pokemon": pokemon.name,
"new": new_pokemon.name,
"iv": pokemon.iv,
"old_cp": pokemon.cp,
"cp": new_pokemon.cp,
"candy": candy.quantity,
"xp": xp})
if self.config_evolve and (not self.bot.config.test):
new_pokemon = inventory.Pokemon(evolution)
self.evolution_map[pokemon.unique_id] = new_pokemon
inventory.pokemons().remove(pokemon.unique_id)
inventory.pokemons().add(new_pokemon)
with self.bot.database as db:
cursor = db.cursor()
cursor.execute("SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='evolve_log'")
db_result = cursor.fetchone()
if db_result[0] == 1:
db.execute("INSERT INTO evolve_log (pokemon, iv, cp) VALUES (?, ?, ?)", (pokemon.name, pokemon.iv, pokemon.cp))
sleep(self.config_evolve_time, 0.1)
if transfer and not self.used_lucky_egg:
            # Transfer the new Pokemon immediately!
self.transfer_pokemon([new_pokemon], True)
return True
def upgrade_pokemon(self, pokemon):
upgrade_level = min(self.config_upgrade_level, inventory.player().level + 1.5, 40)
candy = inventory.candies().get(pokemon.pokemon_id)
for i in range(int(pokemon.level * 2), int(upgrade_level * 2)):
upgrade_cost = self.pokemon_upgrade_cost[i - 2]
upgrade_candy_cost = upgrade_cost[0]
upgrade_stardust_cost = upgrade_cost[1]
if self.config_upgrade and (not self.bot.config.test):
response_dict = self.bot.api.upgrade_pokemon(pokemon_id=pokemon.unique_id)
else:
response_dict = {"responses": {"UPGRADE_POKEMON": {"result": SUCCESS}}}
if not response_dict:
return False
result = response_dict.get("responses", {}).get("UPGRADE_POKEMON", {}).get("result", 0)
if result != SUCCESS:
return False
upgrade = response_dict.get("responses", {}).get("UPGRADE_POKEMON", {}).get("upgraded_pokemon", {})
if self.config_upgrade and (not self.bot.config.test):
candy.consume(upgrade_candy_cost)
self.bot.stardust -= upgrade_stardust_cost
new_pokemon = inventory.Pokemon(upgrade)
self.emit_event("pokemon_upgraded",
formatted="Upgraded {pokemon} [IV {iv}] [CP {cp} -> {new_cp}] [{candy} candies] [{stardust} stardust]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp,
"new_cp": new_pokemon.cp,
"candy": candy.quantity,
"stardust": self.bot.stardust})
if self.config_upgrade and (not self.bot.config.test):
inventory.pokemons().remove(pokemon.unique_id)
new_pokemon = inventory.Pokemon(upgrade)
inventory.pokemons().add(new_pokemon)
pokemon = new_pokemon
action_delay(self.config_action_wait_min, self.config_action_wait_max)
return True
def set_buddy_pokemon(self, pokemon):
if not self.bot.config.test:
response_dict = self.bot.api.set_buddy_pokemon(pokemon_id=pokemon.unique_id)
else:
response_dict = {"responses": {"SET_BUDDY_POKEMON": {"result": SUCCESS, "updated_buddy": {"start_km_walked": 0, "last_km_awarded": 0, "id": 0}}}}
if not response_dict:
return False
result = response_dict.get("responses", {}).get("SET_BUDDY_POKEMON", {}).get("result", 0)
if result != SUCCESS:
return False
if not self.bot.config.test:
self.buddy = response_dict.get("responses", {}).get("SET_BUDDY_POKEMON", {}).get("updated_buddy", {})
self.buddyid = self._get_buddyid()
self.emit_event("buddy_pokemon",
formatted="Buddy {pokemon} [IV {iv}] [CP {cp}]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp})
self.lock_buddy = True
if not self.bot.config.test:
action_delay(self.config_action_wait_min, self.config_action_wait_max)
return True
def get_buddy_walked(self, pokemon):
if not self.bot.config.test:
response_dict = self.bot.api.get_buddy_walked()
else:
response_dict = {"responses": {"GET_BUDDY_WALKED": {"success": True, "family_candy_id": 0, "candy_earned_count": 0}}}
if not response_dict:
return False
success = response_dict.get("responses", {}).get("GET_BUDDY_WALKED", {}).get("success", False)
if not success:
return False
candy_earned_count = response_dict.get("responses", {}).get("GET_BUDDY_WALKED", {}).get("candy_earned_count", 0)
if candy_earned_count == 0:
return
family_candy_id = self.get_family_id(pokemon)
candy = inventory.candies().get(family_candy_id)
if not self.bot.config.test:
candy.add(candy_earned_count)
self.emit_event("buddy_reward",
formatted="Buddy {pokemon} rewards {family} candies [+{candy_earned} candies] [{candy} candies]",
data={"pokemon": pokemon.name,
"family": candy.type,
"candy_earned": candy_earned_count,
"candy": candy.quantity})
if not self.bot.config.test:
action_delay(self.config_action_wait_min, self.config_action_wait_max)
return True
def _get_buddyid(self):
        if self.buddy and 'id' in self.buddy:
return self.buddy['id']
return 0
def favor_pokemon(self, pokemon):
response_dict = self.bot.api.set_favorite_pokemon(pokemon_id=pokemon.unique_id, is_favorite=True)
sleep(1.2) # wait a bit after request
if response_dict:
result = response_dict.get('responses', {}).get('SET_FAVORITE_POKEMON', {}).get('result', 0)
            if result == 1:  # Request success
action_delay(self.config_action_wait_min, self.config_action_wait_max)
# Mark Pokemon as favorite
pokemon.is_favorite = True
self.emit_event("pokemon_favored",
formatted="Favored {pokemon} [IV {iv}] [CP {cp}]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp})
else:
# Pokemon not found??
self.ignore_favorite.append(pokemon.unique_id)
pokemon.is_favorite = True
self.logger.info("Unable to set %s as favorite!" % pokemon.name)
def unfavor_pokemon(self, pokemon):
response_dict = self.bot.api.set_favorite_pokemon(pokemon_id=pokemon.unique_id, is_favorite=False)
sleep(1.2) # wait a bit after request
if response_dict:
result = response_dict.get('responses', {}).get('SET_FAVORITE_POKEMON', {}).get('result', 0)
            if result == 1:  # Request success
# Mark Pokemon as no longer favorite
pokemon.is_favorite = False
self.emit_event("pokemon_unfavored",
formatted="Unfavored {pokemon} [IV {iv}] [CP {cp}]",
data={"pokemon": pokemon.name,
"iv": pokemon.iv,
"cp": pokemon.cp})
action_delay(self.config_action_wait_min, self.config_action_wait_max)
| mit |
Titulacion-Sistemas/PythonTitulacion-EV | Lib/site-packages/django/contrib/gis/geos/prototypes/__init__.py | 314 | 1305 | """
This module contains all of the GEOS ctypes function prototypes. Each
prototype handles the interaction between the GEOS library and Python
via ctypes.
"""
# Coordinate sequence routines.
from django.contrib.gis.geos.prototypes.coordseq import (create_cs, get_cs,
cs_clone, cs_getordinate, cs_setordinate, cs_getx, cs_gety, cs_getz,
cs_setx, cs_sety, cs_setz, cs_getsize, cs_getdims)
# Geometry routines.
from django.contrib.gis.geos.prototypes.geom import (from_hex, from_wkb, from_wkt,
create_point, create_linestring, create_linearring, create_polygon, create_collection,
destroy_geom, get_extring, get_intring, get_nrings, get_geomn, geom_clone,
geos_normalize, geos_type, geos_typeid, geos_get_srid, geos_set_srid,
get_dims, get_num_coords, get_num_geoms,
to_hex, to_wkb, to_wkt)
# Miscellaneous routines.
from django.contrib.gis.geos.prototypes.misc import *
# Predicates
from django.contrib.gis.geos.prototypes.predicates import (geos_hasz, geos_isempty,
geos_isring, geos_issimple, geos_isvalid, geos_contains, geos_crosses,
geos_disjoint, geos_equals, geos_equalsexact, geos_intersects,
geos_intersects, geos_overlaps, geos_relatepattern, geos_touches, geos_within)
# Topology routines
from django.contrib.gis.geos.prototypes.topology import *
| mit |
IronLanguages/ironpython2 | Src/StdLib/Lib/site-packages/win32/Demos/security/localized_names.py | 34 | 2030 | # A Python port of the MS knowledge base article Q157234
# "How to deal with localized and renamed user and group names"
# http://support.microsoft.com/default.aspx?kbid=157234
import sys
from win32net import NetUserModalsGet
from win32security import LookupAccountSid
import pywintypes
from ntsecuritycon import *
def LookupAliasFromRid(TargetComputer, Rid):
# Sid is the same regardless of machine, since the well-known
# BUILTIN domain is referenced.
sid = pywintypes.SID()
sid.Initialize(SECURITY_NT_AUTHORITY, 2)
for i, r in enumerate((SECURITY_BUILTIN_DOMAIN_RID, Rid)):
sid.SetSubAuthority(i, r)
name, domain, typ = LookupAccountSid(TargetComputer, sid)
return name
def LookupUserGroupFromRid(TargetComputer, Rid):
# get the account domain Sid on the target machine
# note: if you were looking up multiple sids based on the same
# account domain, only need to call this once.
umi2 = NetUserModalsGet(TargetComputer, 2)
domain_sid = umi2['domain_id']
SubAuthorityCount = domain_sid.GetSubAuthorityCount()
# create and init new sid with acct domain Sid + acct Rid
sid = pywintypes.SID()
sid.Initialize(domain_sid.GetSidIdentifierAuthority(),
SubAuthorityCount+1)
# copy existing subauthorities from account domain Sid into
# new Sid
for i in range(SubAuthorityCount):
sid.SetSubAuthority(i, domain_sid.GetSubAuthority(i))
# append Rid to new Sid
sid.SetSubAuthority(SubAuthorityCount, Rid)
name, domain, typ = LookupAccountSid(TargetComputer, sid)
return name
def main():
if len(sys.argv) == 2:
targetComputer = sys.argv[1]
else:
targetComputer = None
name = LookupUserGroupFromRid(targetComputer, DOMAIN_USER_RID_ADMIN)
print "'Administrator' user name = %s" % (name,)
name = LookupAliasFromRid(targetComputer, DOMAIN_ALIAS_RID_ADMINS)
print "'Administrators' local group/alias name = %s" % (name,)
if __name__ == '__main__':
main()
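# Illustrative output on an English (en-US) system -- actual names vary with
# the OS language and any administrator renames:
#   'Administrator' user name = Administrator
#   'Administrators' local group/alias name = Administrators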
| apache-2.0 |
mhct/ardupilot | Tools/autotest/param_metadata/xmlemit.py | 238 | 2717 | #!/usr/bin/env python
from xml.sax.saxutils import escape, quoteattr
from param import *
from emit import Emit
# Emit APM documentation in a machine-readable XML format
class XmlEmit(Emit):
def __init__(self):
wiki_fname = 'apm.pdef.xml'
self.f = open(wiki_fname, mode='w')
preamble = '''<?xml version="1.0" encoding="utf-8"?>
<!-- Dynamically generated list of documented parameters (generated by param_parse.py) -->
<paramfile>
<vehicles>
'''
self.f.write(preamble)
def close(self):
self.f.write('</libraries>')
self.f.write('''</paramfile>\n''')
        self.f.close()
def emit_comment(self, s):
self.f.write("<!-- " + s + " -->")
def start_libraries(self):
self.f.write('</vehicles>')
self.f.write('<libraries>')
def emit(self, g, f):
t = '''<parameters name=%s>\n''' % quoteattr(g.name) # i.e. ArduPlane
for param in g.params:
# Begin our parameter node
if hasattr(param, 'DisplayName'):
t += '<param humanName=%s name=%s' % (quoteattr(param.DisplayName),quoteattr(param.name)) # i.e. ArduPlane (ArduPlane:FOOPARM)
else:
t += '<param name=%s' % quoteattr(param.name)
if hasattr(param, 'Description'):
t += ' documentation=%s' % quoteattr(param.Description) # i.e. parameter docs
if hasattr(param, 'User'):
t += ' user=%s' % quoteattr(param.User) # i.e. Standard or Advanced
t += ">\n"
            # Add values as children of this node
for field in param.__dict__.keys():
if field not in ['name', 'DisplayName', 'Description', 'User'] and field in known_param_fields:
if field == 'Values' and Emit.prog_values_field.match(param.__dict__[field]):
t+= "<values>\n"
values = (param.__dict__[field]).split(',')
for value in values:
v = value.split(':')
t+='''<value code=%s>%s</value>\n''' % (quoteattr(v[0]), escape(v[1])) # i.e. numeric value, string label
t += "</values>\n"
else:
t += '''<field name=%s>%s</field>\n''' % (quoteattr(field), escape(param.__dict__[field])) # i.e. Range: 0 10
t += '''</param>\n'''
t += '''</parameters>\n'''
#print t
self.f.write(t)
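# Sketch of the intended call sequence (the group objects come from
# param_parse.py; the names below are hypothetical):
#   emitter = XmlEmit()
#   emitter.emit(vehicle_group, None)   # one <parameters> block per vehicle
#   emitter.start_libraries()           # closes <vehicles>, opens <libraries>
#   emitter.emit(library_group, None)
#   emitter.close()                     # closes <libraries> and <paramfile>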
| gpl-3.0 |
apache/airflow | tests/providers/amazon/aws/hooks/test_batch_client.py | 2 | 13457 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
import botocore.exceptions
import pytest
from parameterized import parameterized
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.batch_client import AwsBatchClientHook
# Use dummy AWS credentials
AWS_REGION = "eu-west-1"
AWS_ACCESS_KEY_ID = "airflow_dummy_key"
AWS_SECRET_ACCESS_KEY = "airflow_dummy_secret"
JOB_ID = "8ba9d676-4108-4474-9dca-8bbac1da9b19"
class TestAwsBatchClient(unittest.TestCase):
MAX_RETRIES = 2
STATUS_RETRIES = 3
@mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
@mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
@mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.AwsBaseHook.get_client_type")
def setUp(self, get_client_type_mock):
self.get_client_type_mock = get_client_type_mock
self.batch_client = AwsBatchClientHook(
max_retries=self.MAX_RETRIES,
status_retries=self.STATUS_RETRIES,
aws_conn_id='airflow_test',
region_name=AWS_REGION,
)
self.client_mock = get_client_type_mock.return_value
assert self.batch_client.client == self.client_mock # setup client property
# don't pause in these unit tests
self.mock_delay = mock.Mock(return_value=None)
self.batch_client.delay = self.mock_delay
self.mock_exponential_delay = mock.Mock(return_value=0)
self.batch_client.exponential_delay = self.mock_exponential_delay
def test_init(self):
assert self.batch_client.max_retries == self.MAX_RETRIES
assert self.batch_client.status_retries == self.STATUS_RETRIES
assert self.batch_client.region_name == AWS_REGION
assert self.batch_client.aws_conn_id == 'airflow_test'
assert self.batch_client.client == self.client_mock
self.get_client_type_mock.assert_called_once_with("batch", region_name=AWS_REGION)
def test_wait_for_job_with_success(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
with mock.patch.object(
self.batch_client,
"poll_for_job_running",
wraps=self.batch_client.poll_for_job_running,
) as job_running:
self.batch_client.wait_for_job(JOB_ID)
job_running.assert_called_once_with(JOB_ID, None)
with mock.patch.object(
self.batch_client,
"poll_for_job_complete",
wraps=self.batch_client.poll_for_job_complete,
) as job_complete:
self.batch_client.wait_for_job(JOB_ID)
job_complete.assert_called_once_with(JOB_ID, None)
assert self.client_mock.describe_jobs.call_count == 4
def test_wait_for_job_with_failure(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "FAILED"}]}
with mock.patch.object(
self.batch_client,
"poll_for_job_running",
wraps=self.batch_client.poll_for_job_running,
) as job_running:
self.batch_client.wait_for_job(JOB_ID)
job_running.assert_called_once_with(JOB_ID, None)
with mock.patch.object(
self.batch_client,
"poll_for_job_complete",
wraps=self.batch_client.poll_for_job_complete,
) as job_complete:
self.batch_client.wait_for_job(JOB_ID)
job_complete.assert_called_once_with(JOB_ID, None)
assert self.client_mock.describe_jobs.call_count == 4
def test_poll_job_running_for_status_running(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNING"}]}
self.batch_client.poll_for_job_running(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_poll_job_complete_for_status_success(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
self.batch_client.poll_for_job_complete(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_poll_job_complete_raises_for_max_retries(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNING"}]}
with pytest.raises(AirflowException) as ctx:
self.batch_client.poll_for_job_complete(JOB_ID)
msg = f"AWS Batch job ({JOB_ID}) status checks exceed max_retries"
assert msg in str(ctx.value)
self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID])
assert self.client_mock.describe_jobs.call_count == self.MAX_RETRIES + 1
def test_poll_job_status_hit_api_throttle(self):
self.client_mock.describe_jobs.side_effect = botocore.exceptions.ClientError(
error_response={"Error": {"Code": "TooManyRequestsException"}},
operation_name="get job description",
)
with pytest.raises(AirflowException) as ctx:
self.batch_client.poll_for_job_complete(JOB_ID)
msg = f"AWS Batch job ({JOB_ID}) description error"
assert msg in str(ctx.value)
# It should retry when this client error occurs
self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID])
assert self.client_mock.describe_jobs.call_count == self.STATUS_RETRIES
def test_poll_job_status_with_client_error(self):
self.client_mock.describe_jobs.side_effect = botocore.exceptions.ClientError(
error_response={"Error": {"Code": "InvalidClientTokenId"}},
operation_name="get job description",
)
with pytest.raises(AirflowException) as ctx:
self.batch_client.poll_for_job_complete(JOB_ID)
msg = f"AWS Batch job ({JOB_ID}) description error"
assert msg in str(ctx.value)
# It will not retry when this client error occurs
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_check_job_success(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
status = self.batch_client.check_job_success(JOB_ID)
assert status
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_check_job_success_raises_failed(self):
self.client_mock.describe_jobs.return_value = {
"jobs": [
{
"jobId": JOB_ID,
"status": "FAILED",
"statusReason": "This is an error reason",
"attempts": [{"exitCode": 1}],
}
]
}
with pytest.raises(AirflowException) as ctx:
self.batch_client.check_job_success(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
msg = f"AWS Batch job ({JOB_ID}) failed"
assert msg in str(ctx.value)
def test_check_job_success_raises_failed_for_multiple_attempts(self):
self.client_mock.describe_jobs.return_value = {
"jobs": [
{
"jobId": JOB_ID,
"status": "FAILED",
"statusReason": "This is an error reason",
"attempts": [{"exitCode": 1}, {"exitCode": 10}],
}
]
}
with pytest.raises(AirflowException) as ctx:
self.batch_client.check_job_success(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
msg = f"AWS Batch job ({JOB_ID}) failed"
assert msg in str(ctx.value)
def test_check_job_success_raises_incomplete(self):
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNABLE"}]}
with pytest.raises(AirflowException) as ctx:
self.batch_client.check_job_success(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
msg = f"AWS Batch job ({JOB_ID}) is not complete"
assert msg in str(ctx.value)
def test_check_job_success_raises_unknown_status(self):
status = "STRANGE"
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": status}]}
with pytest.raises(AirflowException) as ctx:
self.batch_client.check_job_success(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
msg = f"AWS Batch job ({JOB_ID}) has unknown status"
assert msg in str(ctx.value)
assert status in str(ctx.value)
def test_check_job_success_raises_without_jobs(self):
self.client_mock.describe_jobs.return_value = {"jobs": []}
with pytest.raises(AirflowException) as ctx:
self.batch_client.check_job_success(JOB_ID)
self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
msg = f"AWS Batch job ({JOB_ID}) description error"
assert msg in str(ctx.value)
def test_terminate_job(self):
self.client_mock.terminate_job.return_value = {}
reason = "Task killed by the user"
response = self.batch_client.terminate_job(JOB_ID, reason)
self.client_mock.terminate_job.assert_called_once_with(jobId=JOB_ID, reason=reason)
assert response == {}
class TestAwsBatchClientDelays(unittest.TestCase):
@mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
@mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
@mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
def setUp(self):
self.batch_client = AwsBatchClientHook(aws_conn_id='airflow_test', region_name=AWS_REGION)
def test_init(self):
assert self.batch_client.max_retries == self.batch_client.MAX_RETRIES
assert self.batch_client.status_retries == self.batch_client.STATUS_RETRIES
assert self.batch_client.region_name == AWS_REGION
assert self.batch_client.aws_conn_id == 'airflow_test'
def test_add_jitter(self):
minima = 0
width = 5
result = self.batch_client.add_jitter(0, width=width, minima=minima)
assert result >= minima
assert result <= width
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
def test_delay_defaults(self, mock_sleep, mock_uniform):
assert AwsBatchClientHook.DEFAULT_DELAY_MIN == 1
assert AwsBatchClientHook.DEFAULT_DELAY_MAX == 10
mock_uniform.return_value = 0
self.batch_client.delay()
mock_uniform.assert_called_once_with(
AwsBatchClientHook.DEFAULT_DELAY_MIN, AwsBatchClientHook.DEFAULT_DELAY_MAX
)
mock_sleep.assert_called_once_with(0)
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
def test_delay_with_zero(self, mock_sleep, mock_uniform):
self.batch_client.delay(0)
mock_uniform.assert_called_once_with(0, 1) # in add_jitter
mock_sleep.assert_called_once_with(mock_uniform.return_value)
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
def test_delay_with_int(self, mock_sleep, mock_uniform):
self.batch_client.delay(5)
mock_uniform.assert_called_once_with(4, 6) # in add_jitter
mock_sleep.assert_called_once_with(mock_uniform.return_value)
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
def test_delay_with_float(self, mock_sleep, mock_uniform):
self.batch_client.delay(5.0)
mock_uniform.assert_called_once_with(4.0, 6.0) # in add_jitter
mock_sleep.assert_called_once_with(mock_uniform.return_value)
@parameterized.expand(
[
(0, 0, 1),
(1, 0, 2),
(2, 0, 3),
(3, 1, 5),
(4, 2, 7),
(5, 3, 11),
(6, 4, 14),
(7, 6, 19),
(8, 8, 25),
(9, 10, 31),
(45, 200, 600), # > 40 tries invokes maximum delay allowed
]
)
def test_exponential_delay(self, tries, lower, upper):
result = self.batch_client.exponential_delay(tries)
assert result >= lower
assert result <= upper
| apache-2.0 |
guorendong/iridium-browser-ubuntu | tools/telemetry/telemetry/page/page_test.py | 3 | 7843 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import action_runner as action_runner_module
from telemetry.page import test_expectations
class TestNotSupportedOnPlatformError(Exception):
"""PageTest Exception raised when a required feature is unavailable.
The feature required to run the test could be part of the platform,
hardware configuration, or browser.
"""
class MultiTabTestAppCrashError(Exception):
"""PageTest Exception raised after browser or tab crash for multi-tab tests.
Used to abort the test rather than try to recover from an unknown state.
"""
class Failure(Exception):
"""PageTest Exception raised when an undesired but designed-for problem."""
class MeasurementFailure(Failure):
"""PageTest Exception raised when an undesired but designed-for problem."""
class PageTest(object):
"""A class styled on unittest.TestCase for creating page-specific tests.
Test should override ValidateAndMeasurePage to perform test
validation and page measurement as necessary.
class BodyChildElementMeasurement(PageTest):
def ValidateAndMeasurePage(self, page, tab, results):
body_child_count = tab.EvaluateJavaScript(
'document.body.children.length')
results.AddValue(scalar.ScalarValue(
page, 'body_children', 'count', body_child_count))
  Args:
    discard_first_result: Discard the first run of this page. This is
        usually used with page_repeat and pageset_repeat options.
"""
def __init__(self,
needs_browser_restart_after_each_page=False,
discard_first_result=False,
clear_cache_before_each_run=False):
super(PageTest, self).__init__()
self.options = None
self._needs_browser_restart_after_each_page = (
needs_browser_restart_after_each_page)
self._discard_first_result = discard_first_result
self._clear_cache_before_each_run = clear_cache_before_each_run
self._close_tabs_before_run = True
@property
def is_multi_tab_test(self):
"""Returns True if the test opens multiple tabs.
If the test overrides TabForPage, it is deemed a multi-tab test.
    Multi-tab tests do not retry after tab or browser crashes, whereas
    single-tab tests do. That is because the state of multi-tab tests
    (e.g., how many tabs are open, etc.) is unknown after crashes.
"""
return self.TabForPage.__func__ is not PageTest.TabForPage.__func__
@property
def discard_first_result(self):
"""When set to True, the first run of the test is discarded. This is
useful for cases where it's desirable to have some test resource cached so
the first run of the test can warm things up. """
return self._discard_first_result
@discard_first_result.setter
def discard_first_result(self, discard):
self._discard_first_result = discard
@property
def clear_cache_before_each_run(self):
"""When set to True, the browser's disk and memory cache will be cleared
before each run."""
return self._clear_cache_before_each_run
@property
def close_tabs_before_run(self):
"""When set to True, all tabs are closed before running the test for the
first time."""
return self._close_tabs_before_run
@close_tabs_before_run.setter
def close_tabs_before_run(self, close_tabs):
self._close_tabs_before_run = close_tabs
def RestartBrowserBeforeEachPage(self):
""" Should the browser be restarted for the page?
This returns true if the test needs to unconditionally restart the
browser for each page. It may be called before the browser is started.
"""
return self._needs_browser_restart_after_each_page
def StopBrowserAfterPage(self, browser, page): # pylint: disable=W0613
"""Should the browser be stopped after the page is run?
This is called after a page is run to decide whether the browser needs to
be stopped to clean up its state. If it is stopped, then it will be
restarted to run the next page.
A test that overrides this can look at both the page and the browser to
decide whether it needs to stop the browser.
"""
return False
def CustomizeBrowserOptions(self, options):
"""Override to add test-specific options to the BrowserOptions object"""
def CustomizeBrowserOptionsForSinglePage(self, page, options):
"""Set options specific to the test and the given page.
This will be called with the current page when the browser is (re)started.
Changing options at this point only makes sense if the browser is being
restarted for each page. Note that if page has a startup_url, the browser
will always be restarted for each run.
"""
if page.startup_url:
options.browser_options.startup_url = page.startup_url
def WillStartBrowser(self, platform):
"""Override to manipulate the browser environment before it launches."""
def DidStartBrowser(self, browser):
"""Override to customize the browser right after it has launched."""
def SetOptions(self, options):
"""Sets the BrowserFinderOptions instance to use."""
self.options = options
def WillNavigateToPage(self, page, tab):
"""Override to do operations before the page is navigated, notably Telemetry
will already have performed the following operations on the browser before
calling this function:
* Ensure only one tab is open.
* Call WaitForDocumentReadyStateToComplete on the tab."""
def DidNavigateToPage(self, page, tab):
"""Override to do operations right after the page is navigated and after
all waiting for completion has occurred."""
def CleanUpAfterPage(self, page, tab):
"""Called after the test run method was run, even if it failed."""
def CreateExpectations(self, page_set): # pylint: disable=W0613
"""Override to make this test generate its own expectations instead of
any that may have been defined in the page set."""
return test_expectations.TestExpectations()
def TabForPage(self, page, browser): # pylint: disable=W0613
"""Override to select a different tab for the page. For instance, to
create a new tab for every page, return browser.tabs.New()."""
return browser.tabs[0]
def ValidateAndMeasurePage(self, page, tab, results):
"""Override to check test assertions and perform measurement.
When adding measurement results, call results.AddValue(...) for
each result. Raise an exception or add a failure.FailureValue on
failure. page_test.py also provides several base exception classes
to use.
Prefer metric value names that are in accordance with python
variable style. e.g., metric_name. The name 'url' must not be used.
Put together:
def ValidateAndMeasurePage(self, page, tab, results):
res = tab.EvaluateJavaScript('2+2')
if res != 4:
raise Exception('Oh, wow.')
results.AddValue(scalar.ScalarValue(
page, 'two_plus_two', 'count', res))
Args:
page: A telemetry.page.Page instance.
tab: A telemetry.core.Tab instance.
results: A telemetry.results.PageTestResults instance.
"""
raise NotImplementedError
def RunPage(self, page, tab, results):
# Run actions.
action_runner = action_runner_module.ActionRunner(
tab, skip_waits=page.skip_waits)
page.RunPageInteractions(action_runner)
self.ValidateAndMeasurePage(page, tab, results)
def RunNavigateSteps(self, page, tab):
"""Navigates the tab to the page URL attribute.
Runs the 'navigate_steps' page attribute as a compound action.
"""
action_runner = action_runner_module.ActionRunner(
tab, skip_waits=page.skip_waits)
page.RunNavigateSteps(action_runner)
| bsd-3-clause |
sunny94/temp | sympy/integrals/integrals.py | 7 | 49446 | from __future__ import print_function, division
from sympy.concrete.expr_with_limits import AddWithLimits
from sympy.core.add import Add
from sympy.core.basic import Basic, C
from sympy.core.compatibility import is_sequence, xrange
from sympy.core.containers import Tuple
from sympy.core.expr import Expr
from sympy.core.function import diff
from sympy.core.numbers import oo
from sympy.core.relational import Eq
from sympy.sets.sets import Interval
from sympy.core.singleton import S
from sympy.core.symbol import (Dummy, Symbol, Wild)
from sympy.core.sympify import sympify
from sympy.integrals.manualintegrate import manualintegrate
from sympy.integrals.trigonometry import trigintegrate
from sympy.integrals.deltafunctions import deltaintegrate
from sympy.integrals.rationaltools import ratint
from sympy.integrals.heurisch import heurisch, heurisch_wrapper
from sympy.integrals.meijerint import meijerint_definite, meijerint_indefinite
from sympy.utilities import xthreaded, flatten
from sympy.utilities.misc import filldedent
from sympy.polys import Poly, PolynomialError
from sympy.solvers.solvers import solve, posify
from sympy.functions import Piecewise, sqrt, sign
from sympy.geometry import Curve
from sympy.functions.elementary.piecewise import piecewise_fold
from sympy.series import limit
class Integral(AddWithLimits):
"""Represents unevaluated integral."""
__slots__ = ['is_commutative']
def __new__(cls, function, *symbols, **assumptions):
"""Create an unevaluated integral.
Arguments are an integrand followed by one or more limits.
If no limits are given and there is only one free symbol in the
expression, that symbol will be used, otherwise an error will be
raised.
>>> from sympy import Integral
>>> from sympy.abc import x, y
>>> Integral(x)
Integral(x, x)
>>> Integral(y)
Integral(y, y)
When limits are provided, they are interpreted as follows (using
``x`` as though it were the variable of integration):
(x,) or x - indefinite integral
(x, a) - "evaluate at" integral is an abstract antiderivative
(x, a, b) - definite integral
The ``as_dummy`` method can be used to see which symbols cannot be
        targeted by subs: those with a prepended underscore cannot be
changed with ``subs``. (Also, the integration variables themselves --
the first element of a limit -- can never be changed by subs.)
>>> i = Integral(x, x)
>>> at = Integral(x, (x, x))
>>> i.as_dummy()
Integral(x, x)
>>> at.as_dummy()
Integral(_x, (_x, x))
"""
#This will help other classes define their own definitions
#of behaviour with Integral.
if hasattr(function, '_eval_Integral'):
return function._eval_Integral(*symbols, **assumptions)
obj = AddWithLimits.__new__(cls, function, *symbols, **assumptions)
return obj
def __getnewargs__(self):
return (self.function,) + tuple([tuple(xab) for xab in self.limits])
@property
def free_symbols(self):
"""
This method returns the symbols that will exist when the
integral is evaluated. This is useful if one is trying to
determine whether an integral depends on a certain
symbol or not.
Examples
========
>>> from sympy import Integral
>>> from sympy.abc import x, y
>>> Integral(x, (x, y, 1)).free_symbols
set([y])
See Also
========
function, limits, variables
"""
return AddWithLimits.free_symbols.fget(self)
def _eval_is_zero(self):
# This is a very naive and quick test, not intended to do the integral to
# answer whether it is zero or not, e.g. Integral(sin(x), (x, 0, 2*pi))
# is zero but this routine should return None for that case. But, like
# Mul, there are trivial situations for which the integral will be
# zero so we check for those.
if self.function.is_zero:
return True
got_none = False
for l in self.limits:
if len(l) == 3:
z = (l[1] == l[2]) or (l[1] - l[2]).is_zero
if z:
return True
elif z is None:
got_none = True
free = self.function.free_symbols
for xab in self.limits:
if len(xab) == 1:
free.add(xab[0])
continue
if len(xab) == 2 and xab[0] not in free:
if xab[1].is_zero:
return True
elif xab[1].is_zero is None:
got_none = True
# take integration symbol out of free since it will be replaced
# with the free symbols in the limits
free.discard(xab[0])
# add in the new symbols
for i in xab[1:]:
free.update(i.free_symbols)
if self.function.is_zero is False and got_none is False:
return False
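    # A few illustrative outcomes of the quick checks above:
    #   Integral(0, x).is_zero                  -> True  (zero integrand)
    #   Integral(x, (x, 1, 1)).is_zero          -> True  (equal limits)
    #   Integral(sin(x), (x, 0, 2*pi)).is_zero  -> None  (left undecided here)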
def transform(self, x, u):
r"""
Performs a change of variables from `x` to `u` using the relationship
given by `x` and `u` which will define the transformations `f` and `F`
(which are inverses of each other) as follows:
1) If `x` is a Symbol (which is a variable of integration) then `u`
will be interpreted as some function, f(u), with inverse F(u).
This, in effect, just makes the substitution of x with f(x).
2) If `u` is a Symbol then `x` will be interpreted as some function,
F(x), with inverse f(u). This is commonly referred to as
u-substitution.
Once f and F have been identified, the transformation is made as
follows:
        .. math:: \int_a^b x \,\mathrm{d}x \rightarrow \int_{F(a)}^{F(b)} f(u)
                  \frac{\mathrm{d}f(u)}{\mathrm{d}u} \,\mathrm{d}u
where `F(x)` is the inverse of `f(x)` and the limits and integrand have
been corrected so as to retain the same value after integration.
Notes
=====
The mappings, F(x) or f(u), must lead to a unique integral. Linear
        or rational linear expressions, such as `2*x`, `1/x` and `sqrt(x)`, will
always work; quadratic expressions like `x**2 - 1` are acceptable
as long as the resulting integrand does not depend on the sign of
the solutions (see examples).
The integral will be returned unchanged if `x` is not a variable of
integration.
        `x` must be (or contain) only one of the integration variables. If
`u` has more than one free symbol then it should be sent as a tuple
(`u`, `uvar`) where `uvar` identifies which variable is replacing
the integration variable.
XXX can it contain another integration variable?
Examples
========
>>> from sympy.abc import a, b, c, d, x, u, y
>>> from sympy import Integral, S, cos, sqrt
>>> i = Integral(x*cos(x**2 - 1), (x, 0, 1))
transform can change the variable of integration
>>> i.transform(x, u)
Integral(u*cos(u**2 - 1), (u, 0, 1))
transform can perform u-substitution as long as a unique
integrand is obtained:
>>> i.transform(x**2 - 1, u)
Integral(cos(u)/2, (u, -1, 0))
This attempt fails because x = +/-sqrt(u + 1) and the
sign does not cancel out of the integrand:
>>> Integral(cos(x**2 - 1), (x, 0, 1)).transform(x**2 - 1, u)
Traceback (most recent call last):
...
ValueError:
The mapping between F(x) and f(u) did not give a unique integrand.
transform can do a substitution. Here, the previous
result is transformed back into the original expression
using "u-substitution":
>>> ui = _
>>> _.transform(sqrt(u + 1), x) == i
True
We can accomplish the same with a regular substitution:
>>> ui.transform(u, x**2 - 1) == i
True
If the `x` does not contain a symbol of integration then
the integral will be returned unchanged. Integral `i` does
not have an integration variable `a` so no change is made:
>>> i.transform(a, x) == i
True
When `u` has more than one free symbol the symbol that is
replacing `x` must be identified by passing `u` as a tuple:
>>> Integral(x, (x, 0, 1)).transform(x, (u + a, u))
Integral(a + u, (u, -a, -a + 1))
>>> Integral(x, (x, 0, 1)).transform(x, (u + a, a))
Integral(a + u, (a, -u, -u + 1))
See Also
========
variables : Lists the integration variables
as_dummy : Replace integration variables with dummy ones
"""
d = Dummy('d')
xfree = x.free_symbols.intersection(self.variables)
if len(xfree) > 1:
raise ValueError(
'F(x) can only contain one of: %s' % self.variables)
xvar = xfree.pop() if xfree else d
if xvar not in self.variables:
return self
u = sympify(u)
if isinstance(u, Expr):
ufree = u.free_symbols
if len(ufree) != 1:
raise ValueError(filldedent('''
When f(u) has more than one free symbol, the one replacing x
must be identified: pass f(u) as (f(u), u)'''))
uvar = ufree.pop()
else:
u, uvar = u
if uvar not in u.free_symbols:
raise ValueError(filldedent('''
Expecting a tuple (expr, symbol) where symbol identified
a free symbol in expr, but symbol is not in expr's free
symbols.'''))
if not isinstance(uvar, Symbol):
raise ValueError(filldedent('''
Expecting a tuple (expr, symbol) but didn't get
a symbol; got %s''' % uvar))
if x.is_Symbol and u.is_Symbol:
return self.xreplace({x: u})
if not x.is_Symbol and not u.is_Symbol:
raise ValueError('either x or u must be a symbol')
if uvar == xvar:
return self.transform(x, (u.subs(uvar, d), d)).xreplace({d: uvar})
if uvar in self.limits:
raise ValueError(filldedent('''
u must contain the same variable as in x
or a variable that is not already an integration variable'''))
if not x.is_Symbol:
F = [x.subs(xvar, d)]
soln = solve(u - x, xvar, check=False)
if not soln:
raise ValueError('no solution for solve(F(x) - f(u), x)')
f = [fi.subs(uvar, d) for fi in soln]
else:
f = [u.subs(uvar, d)]
pdiff, reps = posify(u - x)
puvar = uvar.subs([(v, k) for k, v in reps.items()])
soln = [s.subs(reps) for s in solve(pdiff, puvar)]
if not soln:
raise ValueError('no solution for solve(F(x) - f(u), u)')
F = [fi.subs(xvar, d) for fi in soln]
newfuncs = set([(self.function.subs(xvar, fi)*fi.diff(d)
).subs(d, uvar) for fi in f])
if len(newfuncs) > 1:
raise ValueError(filldedent('''
The mapping between F(x) and f(u) did not give
a unique integrand.'''))
newfunc = newfuncs.pop()
def _calc_limit_1(F, a, b):
"""
replace d with a, using subs if possible, otherwise limit
where sign of b is considered
"""
wok = F.subs(d, a)
if wok is S.NaN or wok.is_finite is False and a.is_finite:
return limit(sign(b)*F, d, a)
return wok
def _calc_limit(a, b):
"""
replace d with a, using subs if possible, otherwise limit
where sign of b is considered
"""
avals = list(set([_calc_limit_1(Fi, a, b) for Fi in F]))
if len(avals) > 1:
raise ValueError(filldedent('''
The mapping between F(x) and f(u) did not
give a unique limit.'''))
return avals[0]
newlimits = []
for xab in self.limits:
sym = xab[0]
if sym == xvar:
if len(xab) == 3:
a, b = xab[1:]
a, b = _calc_limit(a, b), _calc_limit(b, a)
if a - b > 0:
a, b = b, a
newfunc = -newfunc
newlimits.append((uvar, a, b))
elif len(xab) == 2:
a = _calc_limit(xab[1], 1)
newlimits.append((uvar, a))
else:
newlimits.append(uvar)
else:
newlimits.append(xab)
return self.func(newfunc, *newlimits)
def doit(self, **hints):
"""
Perform the integration using any hints given.
Examples
========
>>> from sympy import Integral
>>> from sympy.abc import x, i
>>> Integral(x**i, (i, 1, 3)).doit()
Piecewise((2, log(x) == 0), (x**3/log(x) - x/log(x), True))
See Also
========
sympy.integrals.trigonometry.trigintegrate
sympy.integrals.risch.heurisch
sympy.integrals.rationaltools.ratint
as_sum : Approximate the integral using a sum
"""
if not hints.get('integrals', True):
return self
deep = hints.get('deep', True)
meijerg = hints.get('meijerg', None)
conds = hints.get('conds', 'piecewise')
risch = hints.get('risch', None)
manual = hints.get('manual', None)
if conds not in ['separate', 'piecewise', 'none']:
raise ValueError('conds must be one of "separate", "piecewise", '
'"none", got: %s' % conds)
if risch and any(len(xab) > 1 for xab in self.limits):
raise ValueError('risch=True is only allowed for indefinite integrals.')
# check for the trivial zero
if self.is_zero:
return S.Zero
# now compute and check the function
function = self.function
if deep:
function = function.doit(**hints)
if function.is_zero:
return S.Zero
# There is no trivial answer, so continue
undone_limits = []
# ulj = free symbols of any undone limits' upper and lower limits
ulj = set()
for xab in self.limits:
# compute uli, the free symbols in the
# Upper and Lower limits of limit I
if len(xab) == 1:
uli = set(xab[:1])
elif len(xab) == 2:
uli = xab[1].free_symbols
elif len(xab) == 3:
uli = xab[1].free_symbols.union(xab[2].free_symbols)
# this integral can be done as long as there is no blocking
# limit that has been undone. An undone limit is blocking if
# it contains an integration variable that is in this limit's
# upper or lower free symbols or vice versa
if xab[0] in ulj or any(v[0] in uli for v in undone_limits):
undone_limits.append(xab)
ulj.update(uli)
function = self.func(*([function] + [xab]))
factored_function = function.factor()
if not isinstance(factored_function, Integral):
function = factored_function
continue
# There are a number of tradeoffs in using the meijer g method.
# It can sometimes be a lot faster than other methods, and
# sometimes slower. And there are certain types of integrals for
# which it is more likely to work than others.
# These heuristics are incorporated in deciding what integration
# methods to try, in what order.
# See the integrate() docstring for details.
def try_meijerg(function, xab):
ret = None
if len(xab) == 3 and meijerg is not False:
x, a, b = xab
try:
res = meijerint_definite(function, x, a, b)
except NotImplementedError:
from sympy.integrals.meijerint import _debug
_debug('NotImplementedError from meijerint_definite')
res = None
if res is not None:
f, cond = res
if conds == 'piecewise':
ret = Piecewise((f, cond),
(self.func(function, (x, a, b)), True))
elif conds == 'separate':
if len(self.limits) != 1:
raise ValueError('conds=separate not supported in '
'multiple integrals')
ret = f, cond
else:
ret = f
return ret
meijerg1 = meijerg
if len(xab) == 3 and xab[1].is_real and xab[2].is_real \
and not function.is_Poly and \
(xab[1].has(oo, -oo) or xab[2].has(oo, -oo)):
ret = try_meijerg(function, xab)
if ret is not None:
function = ret
continue
else:
meijerg1 = False
# If the special meijerg code did not succeed finding a definite
# integral, then the code using meijerint_indefinite will not either
# (it might find an antiderivative, but the answer is likely to be
# nonsensical).
# Thus if we are requested to only use meijer g-function methods,
# we give up at this stage. Otherwise we just disable g-function
# methods.
if meijerg1 is False and meijerg is True:
antideriv = None
else:
antideriv = self._eval_integral(
function, xab[0],
meijerg=meijerg1, risch=risch, manual=manual,
conds=conds)
if antideriv is None and meijerg1 is True:
ret = try_meijerg(function, xab)
if ret is not None:
function = ret
continue
if antideriv is None:
undone_limits.append(xab)
function = self.func(*([function] + [xab])).factor()
factored_function = function.factor()
if not isinstance(factored_function, Integral):
function = factored_function
continue
else:
if len(xab) == 1:
function = antideriv
else:
if len(xab) == 3:
x, a, b = xab
elif len(xab) == 2:
x, b = xab
a = None
else:
raise NotImplementedError
if deep:
if isinstance(a, Basic):
a = a.doit(**hints)
if isinstance(b, Basic):
b = b.doit(**hints)
if antideriv.is_Poly:
gens = list(antideriv.gens)
gens.remove(x)
antideriv = antideriv.as_expr()
function = antideriv._eval_interval(x, a, b)
function = Poly(function, *gens)
elif isinstance(antideriv, Add):
                    function = Add(*[i._eval_interval(x, a, b) for i in
Add.make_args(antideriv)])
else:
try:
function = antideriv._eval_interval(x, a, b)
except NotImplementedError:
# This can happen if _eval_interval depends in a
# complicated way on limits that cannot be computed
undone_limits.append(xab)
function = self.func(*([function] + [xab]))
factored_function = function.factor()
if not isinstance(factored_function, Integral):
function = factored_function
return function
def _eval_derivative(self, sym):
"""Evaluate the derivative of the current Integral object by
differentiating under the integral sign [1], using the Fundamental
Theorem of Calculus [2] when possible.
        Whenever an Integral is encountered that is equivalent to zero or
        has an integrand that is independent of the variable of integration,
        those integrals are performed. All others are returned as Integral
instances which can be resolved with doit() (provided they are integrable).
References:
[1] http://en.wikipedia.org/wiki/Differentiation_under_the_integral_sign
[2] http://en.wikipedia.org/wiki/Fundamental_theorem_of_calculus
Examples
========
>>> from sympy import Integral
>>> from sympy.abc import x, y
>>> i = Integral(x + y, y, (y, 1, x))
>>> i.diff(x)
Integral(x + y, (y, x)) + Integral(1, y, (y, 1, x))
>>> i.doit().diff(x) == i.diff(x).doit()
True
>>> i.diff(y)
0
The previous must be true since there is no y in the evaluated integral:
>>> i.free_symbols
set([x])
>>> i.doit()
2*x**3/3 - x/2 - 1/6
"""
# differentiate under the integral sign; we do not
# check for regularity conditions (TODO), see issue 4215
# get limits and the function
f, limits = self.function, list(self.limits)
# the order matters if variables of integration appear in the limits
# so work our way in from the outside to the inside.
limit = limits.pop(-1)
if len(limit) == 3:
x, a, b = limit
elif len(limit) == 2:
x, b = limit
a = None
else:
a = b = None
x = limit[0]
if limits: # f is the argument to an integral
f = self.func(f, *tuple(limits))
# assemble the pieces
def _do(f, ab):
dab_dsym = diff(ab, sym)
if not dab_dsym:
return S.Zero
if isinstance(f, Integral):
limits = [(x, x) if (len(l) == 1 and l[0] == x) else l
for l in f.limits]
f = self.func(f.function, *limits)
return f.subs(x, ab)*dab_dsym
rv = 0
if b is not None:
rv += _do(f, b)
if a is not None:
rv -= _do(f, a)
if len(limit) == 1 and sym == x:
# the dummy variable *is* also the real-world variable
arg = f
rv += arg
else:
# the dummy variable might match sym but it's
# only a dummy and the actual variable is determined
# by the limits, so mask off the variable of integration
# while differentiating
u = Dummy('u')
arg = f.subs(x, u).diff(sym).subs(u, x)
rv += self.func(arg, Tuple(x, a, b))
return rv
def _eval_integral(self, f, x, meijerg=None, risch=None, manual=None,
conds='piecewise'):
"""
Calculate the anti-derivative to the function f(x).
The following algorithms are applied (roughly in this order):
1. Simple heuristics (based on pattern matching and integral table):
- most frequently used functions (e.g. polynomials, products of trig functions)
2. Integration of rational functions:
- A complete algorithm for integrating rational functions is
implemented (the Lazard-Rioboo-Trager algorithm). The algorithm
also uses the partial fraction decomposition algorithm
implemented in apart() as a preprocessor to make this process
faster. Note that the integral of a rational function is always
elementary, but in general, it may include a RootSum.
3. Full Risch algorithm:
- The Risch algorithm is a complete decision
procedure for integrating elementary functions, which means that
given any elementary function, it will either compute an
elementary antiderivative, or else prove that none exists.
             Currently, part of the transcendental case is implemented, meaning
elementary integrals containing exponentials, logarithms, and
(soon!) trigonometric functions can be computed. The algebraic
case, e.g., functions containing roots, is much more difficult
and is not implemented yet.
- If the routine fails (because the integrand is not elementary, or
because a case is not implemented yet), it continues on to the
             next algorithms below. If the routine proves that the integral
             is nonelementary, it still moves on to the algorithms below,
because we might be able to find a closed-form solution in terms
of special functions. If risch=True, however, it will stop here.
4. The Meijer G-Function algorithm:
- This algorithm works by first rewriting the integrand in terms of
very general Meijer G-Function (meijerg in SymPy), integrating
it, and then rewriting the result back, if possible. This
algorithm is particularly powerful for definite integrals (which
is actually part of a different method of Integral), since it can
compute closed-form solutions of definite integrals even when no
closed-form indefinite integral exists. But it also is capable
of computing many indefinite integrals as well.
- Another advantage of this method is that it can use some results
about the Meijer G-Function to give a result in terms of a
             Piecewise expression, which makes it possible to express conditionally
convergent integrals.
- Setting meijerg=True will cause integrate() to use only this
method.
5. The "manual integration" algorithm:
- This algorithm tries to mimic how a person would find an
antiderivative by hand, for example by looking for a
substitution or applying integration by parts. This algorithm
does not handle as many integrands but can return results in a
more familiar form.
- Sometimes this algorithm can evaluate parts of an integral; in
this case integrate() will try to evaluate the rest of the
integrand using the other methods here.
- Setting manual=True will cause integrate() to use only this
method.
6. The Heuristic Risch algorithm:
           - This is a heuristic version of the Risch algorithm, meaning that
             it is not a decision procedure. This is tried as a last resort because
it can be very slow. It is still used because not enough of the
full Risch algorithm is implemented, so that there are still some
integrals that can only be computed using this method. The goal
is to implement enough of the Risch and Meijer G methods so that
this can be deleted.
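        As a rough illustration of how these strategies surface through
        integrate() (a sketch; outputs assumed from the documented behavior):

        >>> from sympy import integrate, exp
        >>> from sympy.abc import x
        >>> integrate(1/(x**2 - 1), x)  # rational: handled by ratint()
        log(x - 1)/2 - log(x + 1)/2
        >>> integrate(exp(x)/x, x, risch=True)  # proven nonelementary
        NonElementaryIntegral(exp(x)/x, x)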
"""
from sympy.integrals.risch import risch_integrate
if risch:
try:
return risch_integrate(f, x, conds=conds)
except NotImplementedError:
return None
if manual:
try:
result = manualintegrate(f, x)
if result is not None and result.func != Integral:
return result
except (ValueError, PolynomialError):
pass
# if it is a poly(x) then let the polynomial integrate itself (fast)
#
# It is important to make this check first, otherwise the other code
# will return a sympy expression instead of a Polynomial.
#
# see Polynomial for details.
if isinstance(f, Poly) and not meijerg:
return f.integrate(x)
# Piecewise antiderivatives need to call special integrate.
if f.func is Piecewise:
return f._eval_integral(x)
# let's cut it short if `f` does not depend on `x`
if not f.has(x):
return f*x
# try to convert to poly(x) and then integrate if successful (fast)
poly = f.as_poly(x)
if poly is not None and not meijerg:
return poly.integrate().as_expr()
if risch is not False:
try:
result, i = risch_integrate(f, x, separate_integral=True, conds=conds)
except NotImplementedError:
pass
else:
if i:
# There was a nonelementary integral. Try integrating it.
return result + i.doit(risch=False)
else:
return result
# since Integral(f=g1+g2+...) == Integral(g1) + Integral(g2) + ...
# we are going to handle Add terms separately,
# if `f` is not Add -- we only have one term
# Note that in general, this is a bad idea, because Integral(g1) +
# Integral(g2) might not be computable, even if Integral(g1 + g2) is.
# For example, Integral(x**x + x**x*log(x)). But many heuristics only
# work term-wise. So we compute this step last, after trying
# risch_integrate. We also try risch_integrate again in this loop,
# because maybe the integral is a sum of an elementary part and a
# nonelementary part (like erf(x) + exp(x)). risch_integrate() is
# quite fast, so this is acceptable.
parts = []
args = Add.make_args(f)
for g in args:
coeff, g = g.as_independent(x)
# g(x) = const
if g is S.One and not meijerg:
parts.append(coeff*x)
continue
# g(x) = expr + O(x**n)
order_term = g.getO()
if order_term is not None:
h = self._eval_integral(g.removeO(), x)
if h is not None:
h_order_expr = self._eval_integral(order_term.expr, x)
if h_order_expr is not None:
h_order_term = order_term.func(
h_order_expr, *order_term.variables)
parts.append(coeff*(h + h_order_term))
continue
# NOTE: if there is O(x**n) and we fail to integrate then there is
# no point in trying other methods because they will fail anyway.
return None
            # g(x) = (a*x + b)**c
if g.is_Pow and not g.exp.has(x) and not meijerg:
a = Wild('a', exclude=[x])
b = Wild('b', exclude=[x])
M = g.base.match(a*x + b)
if M is not None:
if g.exp == -1:
h = C.log(g.base)
elif conds != 'piecewise':
h = g.base**(g.exp + 1) / (g.exp + 1)
else:
h1 = C.log(g.base)
h2 = g.base**(g.exp + 1) / (g.exp + 1)
h = Piecewise((h1, Eq(g.exp, -1)), (h2, True))
parts.append(coeff * h / M[a])
continue
            # g(x) = poly(x) / poly(x)  (a rational function)
if g.is_rational_function(x) and not meijerg:
parts.append(coeff * ratint(g, x))
continue
if not meijerg:
# g(x) = Mul(trig)
h = trigintegrate(g, x, conds=conds)
if h is not None:
parts.append(coeff * h)
continue
# g(x) has at least a DiracDelta term
h = deltaintegrate(g, x)
if h is not None:
parts.append(coeff * h)
continue
# Try risch again.
if risch is not False:
try:
h, i = risch_integrate(g, x, separate_integral=True, conds=conds)
except NotImplementedError:
h = None
else:
if i:
h = h + i.doit(risch=False)
parts.append(coeff*h)
continue
# fall back to heurisch
try:
if conds == 'piecewise':
h = heurisch_wrapper(g, x, hints=[])
else:
h = heurisch(g, x, hints=[])
except PolynomialError:
# XXX: this exception means there is a bug in the
# implementation of heuristic Risch integration
# algorithm.
h = None
else:
h = None
if meijerg is not False and h is None:
# rewrite using G functions
try:
h = meijerint_indefinite(g, x)
except NotImplementedError:
from sympy.integrals.meijerint import _debug
                    _debug('NotImplementedError from meijerint_indefinite')
                    h = None
if h is not None:
parts.append(coeff * h)
continue
if h is None and manual is not False:
try:
result = manualintegrate(g, x)
if result is not None and not isinstance(result, Integral):
if result.has(Integral):
# try to have other algorithms do the integrals
# manualintegrate can't handle
result = result.func(*[
arg.doit(manual=False) if arg.has(Integral) else arg
for arg in result.args
]).expand(multinomial=False,
log=False,
power_exp=False,
power_base=False)
if not result.has(Integral):
parts.append(coeff * result)
continue
except (ValueError, PolynomialError):
# can't handle some SymPy expressions
pass
# if we failed maybe it was because we had
# a product that could have been expanded,
# so let's try an expansion of the whole
# thing before giving up; we don't try this
# at the outset because there are things
# that cannot be solved unless they are
# NOT expanded e.g., x**x*(1+log(x)). There
# should probably be a checker somewhere in this
# routine to look for such cases and try to do
# collection on the expressions if they are already
# in an expanded form
if not h and len(args) == 1:
f = f.expand(mul=True, deep=False)
if f.is_Add:
# Note: risch will be identical on the expanded
# expression, but maybe it will be able to pick out parts,
# like x*(exp(x) + erf(x)).
return self._eval_integral(f, x, meijerg=meijerg, risch=risch, conds=conds)
if h is not None:
parts.append(coeff * h)
else:
return None
return Add(*parts)
def _eval_lseries(self, x, logx):
expr = self.as_dummy()
symb = x
for l in expr.limits:
if x in l[1:]:
symb = l[0]
break
for term in expr.function.lseries(symb, logx):
yield integrate(term, *expr.limits)
def _eval_nseries(self, x, n, logx):
expr = self.as_dummy()
symb = x
for l in expr.limits:
if x in l[1:]:
symb = l[0]
break
terms, order = expr.function.nseries(
x=symb, n=n, logx=logx).as_coeff_add(C.Order)
return integrate(terms, *expr.limits) + Add(*order)*x
def as_sum(self, n, method="midpoint"):
"""
Approximates the definite integral by a sum.
method ... one of: left, right, midpoint, trapezoid
These are all basically the rectangle method [1], the only difference
is where the function value is taken in each interval to define the
rectangle.
[1] http://en.wikipedia.org/wiki/Rectangle_method
Examples
========
>>> from sympy import sin, sqrt
>>> from sympy.abc import x
>>> from sympy.integrals import Integral
>>> e = Integral(sin(x), (x, 3, 7))
>>> e
Integral(sin(x), (x, 3, 7))
For demonstration purposes, this interval will only be split into 2
regions, bounded by [3, 5] and [5, 7].
The left-hand rule uses function evaluations at the left of each
interval:
>>> e.as_sum(2, 'left')
2*sin(5) + 2*sin(3)
The midpoint rule uses evaluations at the center of each interval:
>>> e.as_sum(2, 'midpoint')
2*sin(4) + 2*sin(6)
The right-hand rule uses function evaluations at the right of each
interval:
>>> e.as_sum(2, 'right')
2*sin(5) + 2*sin(7)
The trapezoid rule uses function evaluations on both sides of the
intervals. This is equivalent to taking the average of the left and
right hand rule results:
>>> e.as_sum(2, 'trapezoid')
2*sin(5) + sin(3) + sin(7)
>>> (e.as_sum(2, 'left') + e.as_sum(2, 'right'))/2 == _
True
        All but the trapezoid method may be used when dealing with a function
with a discontinuity. Here, the discontinuity at x = 0 can be avoided
by using the midpoint or right-hand method:
>>> e = Integral(1/sqrt(x), (x, 0, 1))
>>> e.as_sum(5).n(4)
1.730
>>> e.as_sum(10).n(4)
1.809
>>> e.doit().n(4) # the actual value is 2
2.000
The left- or trapezoid method will encounter the discontinuity and
return oo:
>>> e.as_sum(5, 'left')
oo
>>> e.as_sum(5, 'trapezoid')
oo
See Also
========
Integral.doit : Perform the integration using any hints
"""
limits = self.limits
if len(limits) > 1:
raise NotImplementedError(
"Multidimensional midpoint rule not implemented yet")
else:
limit = limits[0]
if len(limit) != 3:
raise ValueError("Expecting a definite integral.")
if n <= 0:
raise ValueError("n must be > 0")
if n == oo:
raise NotImplementedError("Infinite summation not yet implemented")
sym, lower_limit, upper_limit = limit
dx = (upper_limit - lower_limit)/n
if method == 'trapezoid':
l = self.function.limit(sym, lower_limit)
r = self.function.limit(sym, upper_limit, "-")
result = (l + r)/2
for i in range(1, n):
x = lower_limit + i*dx
result += self.function.subs(sym, x)
return result*dx
elif method not in ('left', 'right', 'midpoint'):
raise NotImplementedError("Unknown method %s" % method)
result = 0
for i in range(n):
if method == "midpoint":
xi = lower_limit + i*dx + dx/2
elif method == "left":
xi = lower_limit + i*dx
if i == 0:
result = self.function.limit(sym, lower_limit)
continue
elif method == "right":
xi = lower_limit + i*dx + dx
                if i == n - 1:
result += self.function.limit(sym, upper_limit, "-")
continue
result += self.function.subs(sym, xi)
return result*dx
@xthreaded
def integrate(*args, **kwargs):
"""integrate(f, var, ...)
    Compute a definite or indefinite integral with respect to one or more
    variables, using the Risch-Norman algorithm and table lookup. This procedure is
able to handle elementary algebraic and transcendental functions
and also a huge class of special functions, including Airy,
Bessel, Whittaker and Lambert.
var can be:
- a symbol -- indefinite integration
- a tuple (symbol, a) -- indefinite integration with result
given with `a` replacing `symbol`
- a tuple (symbol, a, b) -- definite integration
Several variables can be specified, in which case the result is
multiple integration. (If var is omitted and the integrand is
univariate, the indefinite integral in that variable will be performed.)
Indefinite integrals are returned without terms that are independent
of the integration variables. (see examples)
Definite improper integrals often entail delicate convergence
conditions. Pass conds='piecewise', 'separate' or 'none' to have
these returned, respectively, as a Piecewise function, as a separate
result (i.e. result will be a tuple), or not at all (default is
'piecewise').
**Strategy**
SymPy uses various approaches to definite integration. One method is to
find an antiderivative for the integrand, and then use the fundamental
theorem of calculus. Various functions are implemented to integrate
polynomial, rational and trigonometric functions, and integrands
containing DiracDelta terms.
    SymPy also implements part of the Risch algorithm, which is a decision
procedure for integrating elementary functions, i.e., the algorithm can
either find an elementary antiderivative, or prove that one does not
exist. There is also a (very successful, albeit somewhat slow) general
implementation of the heuristic Risch algorithm. This algorithm will
eventually be phased out as more of the full Risch algorithm is
implemented. See the docstring of Integral._eval_integral() for more
details on computing the antiderivative using algebraic methods.
The option risch=True can be used to use only the (full) Risch algorithm.
This is useful if you want to know if an elementary function has an
elementary antiderivative. If the indefinite Integral returned by this
function is an instance of NonElementaryIntegral, that means that the
Risch algorithm has proven that integral to be non-elementary. Note that
by default, additional methods (such as the Meijer G method outlined
below) are tried on these integrals, as they may be expressible in terms
of special functions, so if you only care about elementary answers, use
risch=True. Also note that an unevaluated Integral returned by this
function is not necessarily a NonElementaryIntegral, even with risch=True,
as it may just be an indication that the particular part of the Risch
algorithm needed to integrate that function is not yet implemented.
Another family of strategies comes from re-writing the integrand in
terms of so-called Meijer G-functions. Indefinite integrals of a
single G-function can always be computed, and the definite integral
of a product of two G-functions can be computed from zero to
infinity. Various strategies are implemented to rewrite integrands
as G-functions, and use this information to compute integrals (see
the ``meijerint`` module).
The option manual=True can be used to use only an algorithm that tries
to mimic integration by hand. This algorithm does not handle as many
integrands as the other algorithms implemented but may return results in
a more familiar form. The ``manualintegrate`` module has functions that
return the steps used (see the module docstring for more information).
In general, the algebraic methods work best for computing
antiderivatives of (possibly complicated) combinations of elementary
functions. The G-function methods work best for computing definite
integrals from zero to infinity of moderately complicated
combinations of special functions, or indefinite integrals of very
simple combinations of special functions.
The strategy employed by the integration code is as follows:
- If computing a definite integral, and both limits are real,
and at least one limit is +- oo, try the G-function method of
definite integration first.
- Try to find an antiderivative, using all available methods, ordered
by performance (that is try fastest method first, slowest last; in
particular polynomial integration is tried first, meijer
g-functions second to last, and heuristic risch last).
- If still not successful, try G-functions irrespective of the
limits.
The option meijerg=True, False, None can be used to, respectively:
always use G-function methods and no others, never use G-function
methods, or use all available methods (in order as described above).
It defaults to None.
Examples
========
>>> from sympy import integrate, log, exp, oo
>>> from sympy.abc import a, x, y
>>> integrate(x*y, x)
x**2*y/2
>>> integrate(log(x), x)
x*log(x) - x
>>> integrate(log(x), (x, 1, a))
a*log(a) - a + 1
>>> integrate(x)
x**2/2
Terms that are independent of x are dropped by indefinite integration:
>>> from sympy import sqrt
>>> integrate(sqrt(1 + x), (x, 0, x))
2*(x + 1)**(3/2)/3 - 2/3
>>> integrate(sqrt(1 + x), x)
2*(x + 1)**(3/2)/3
>>> integrate(x*y)
Traceback (most recent call last):
...
ValueError: specify integration variables to integrate x*y
Note that ``integrate(x)`` syntax is meant only for convenience
in interactive sessions and should be avoided in library code.
>>> integrate(x**a*exp(-x), (x, 0, oo)) # same as conds='piecewise'
Piecewise((gamma(a + 1), -re(a) < 1),
(Integral(x**a*exp(-x), (x, 0, oo)), True))
>>> integrate(x**a*exp(-x), (x, 0, oo), conds='none')
gamma(a + 1)
>>> integrate(x**a*exp(-x), (x, 0, oo), conds='separate')
(gamma(a + 1), -re(a) < 1)
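    The G-function method can also be forced (a sketch; output assumed from
    the documented behavior of the meijerg code):

    >>> integrate(exp(-x**2), (x, 0, oo), meijerg=True)
    sqrt(pi)/2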
See Also
========
Integral, Integral.doit
"""
meijerg = kwargs.pop('meijerg', None)
conds = kwargs.pop('conds', 'piecewise')
risch = kwargs.pop('risch', None)
manual = kwargs.pop('manual', None)
integral = Integral(*args, **kwargs)
if isinstance(integral, Integral):
return integral.doit(deep=False, meijerg=meijerg, conds=conds,
risch=risch, manual=manual)
else:
return integral
@xthreaded
def line_integrate(field, curve, vars):
"""line_integrate(field, Curve, variables)
Compute the line integral.
Examples
========
>>> from sympy import Curve, line_integrate, E, ln
>>> from sympy.abc import x, y, t
>>> C = Curve([E**t + 1, E**t - 1], (t, 0, ln(2)))
>>> line_integrate(x + y, C, [x, y])
3*sqrt(2)
See Also
========
integrate, Integral
"""
F = sympify(field)
if not F:
raise ValueError(
"Expecting function specifying field as first argument.")
if not isinstance(curve, Curve):
raise ValueError("Expecting Curve entity as second argument.")
if not is_sequence(vars):
raise ValueError("Expecting ordered iterable for variables.")
if len(curve.functions) != len(vars):
raise ValueError("Field variable size does not match curve dimension.")
if curve.parameter in vars:
raise ValueError("Curve parameter clashes with field parameters.")
# Calculate derivatives for line parameter functions
    # F(r) -> F(r(t)), then scale by |r'(t)| (the arc-length element)
Ft = F
dldt = 0
for i, var in enumerate(vars):
_f = curve.functions[i]
_dn = diff(_f, curve.parameter)
# ...arc length
dldt = dldt + (_dn * _dn)
Ft = Ft.subs(var, _f)
Ft = Ft * sqrt(dldt)
integral = Integral(Ft, curve.limits).doit(deep=False)
return integral
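# Sketch of what the loop above builds (illustrative, restating the code): for
# a curve r(t) = (f_1(t), ..., f_n(t)), Ft becomes F(r(t)) * sqrt(sum(f_i'(t)**2)),
# i.e. the integrand of the scalar line integral of F with respect to arc length.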
| bsd-3-clause |
rishiloyola/bedrock | bedrock/press/forms.py | 19 | 6883 | # coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django import forms
from lib.l10n_utils.dotlang import _, _lazy
from bedrock.mozorg.forms import (DateInput, EmailInput, HoneyPotWidget,
NumberInput, TelInput, TimeInput, URLInput)
SPEAKER_REQUEST_FILE_SIZE_LIMIT = 5242880 # 5MB
class SpeakerRequestForm(forms.Form):
# event fields
sr_event_name = forms.CharField(
max_length=255,
required=True,
error_messages={
'required': _lazy(u'Please enter a name for the event.'),
},
widget=forms.TextInput(
attrs={
'class': 'required',
'required': 'required',
'aria-required': 'true',
}
),
)
sr_event_url = forms.URLField(
max_length=2000,
required=True,
error_messages={
'required': _lazy(u'Please enter a URL.'),
'invalid': _lazy(u'Please enter a valid URL.'),
},
widget=URLInput(
attrs={
'class': 'required',
'required': 'required',
'aria-required': 'true',
'placeholder': _lazy(u'http://www.my-event.com'),
}
),
)
sr_event_date = forms.CharField(
required=True,
error_messages={
'required': _lazy(u'Please provide a date.'),
},
widget=DateInput(
attrs={
'class': 'required',
'required': 'required',
'aria-required': 'true',
}
),
)
sr_event_time = forms.CharField(
required=True,
error_messages={
'required': _lazy(u'Please provide a time.'),
},
widget=TimeInput(
attrs={
'class': 'required',
'required': 'required',
'aria-required': 'true',
}
),
)
sr_guest_speaker1 = forms.CharField(
max_length=200,
required=False,
)
sr_guest_speaker2 = forms.CharField(
max_length=200,
required=False,
)
# contact fields
sr_contact_name = forms.CharField(
max_length=200,
required=True,
widget=forms.TextInput(
attrs={
'required': 'required',
'class': 'required',
'aria-required': 'true',
}
),
)
sr_contact_title = forms.CharField(
max_length=200,
required=False,
)
sr_contact_company = forms.CharField(
max_length=200,
required=False,
)
sr_contact_phone = forms.CharField(
max_length=50,
required=False,
widget=TelInput(),
)
sr_contact_email = forms.EmailField(
max_length=254, # max length allowed for emails
required=True,
error_messages={
'invalid': _lazy(u'Please enter a valid email address'),
},
widget=EmailInput(
attrs={
'required': 'required',
'class': 'required',
'aria-required': 'true',
}
),
)
sr_contact_company_url = forms.URLField(
max_length=2000,
required=False,
widget=forms.TextInput(
attrs={
'placeholder': _lazy(u'http://www.my-company.com'),
}
),
)
# event details fields
sr_event_venue = forms.CharField(
max_length=400,
required=False,
)
sr_event_theme = forms.CharField(
max_length=200,
required=False,
)
sr_event_goal = forms.CharField(
max_length=300,
required=False,
)
sr_event_format = forms.CharField(
max_length=200,
required=False,
)
sr_event_audience_size = forms.IntegerField(
required=False,
widget=NumberInput(
attrs={
'min': 1,
'placeholder': 25,
}
),
)
sr_event_audience_demographics = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_event_speakers_confirmed = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_event_speakers_invited = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_event_speakers_past = forms.CharField(
max_length=1000,
required=False,
widget=forms.Textarea(),
)
sr_event_media_coverage = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_event_sponsors = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_event_confirmation_deadline = forms.DateField(
required=False,
widget=DateInput(),
)
# presentation details fields
sr_presentation_type = forms.MultipleChoiceField(
required=False,
choices=(
('keynote', _lazy(u'Keynote')),
('presentation', _lazy(u'Presentation')),
('fireside chat', _lazy(u'Fireside Chat')),
('panel', _lazy(u'Panel')),
('other', _lazy(u'Other')),
),
widget=forms.CheckboxSelectMultiple(),
)
sr_presentation_panelists = forms.CharField(
max_length=500,
required=False,
widget=forms.Textarea(),
)
sr_presentation_topic = forms.CharField(
required=False,
max_length=255,
)
    # FloatField rather than IntegerField: the widget below steps in half-units
    sr_presentation_length = forms.FloatField(
required=False,
widget=NumberInput(
attrs={
'min': 0.5,
'step': 0.5,
'placeholder': 2.5,
}
)
)
# additional info fields
sr_attachment = forms.FileField(
required=False,
)
# honeypot
office_fax = forms.CharField(widget=HoneyPotWidget, required=False)
def clean_sr_attachment(self):
cleaned_data = super(SpeakerRequestForm, self).clean()
attachment = cleaned_data.get("sr_attachment")
if attachment:
if attachment._size > SPEAKER_REQUEST_FILE_SIZE_LIMIT:
raise forms.ValidationError(
_("Attachment must not exceed 5MB"))
return attachment
def clean_office_fax(self):
cleaned_data = super(SpeakerRequestForm, self).clean()
honeypot = cleaned_data.pop('office_fax', None)
if honeypot:
raise forms.ValidationError(
_('Your submission could not be processed'))
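# Minimal usage sketch (hypothetical data, illustrative only):
#   form = SpeakerRequestForm(data={'sr_event_name': 'DjangoCon', ...})
#   form.is_valid()  # False unless every required field validates and the
#                    # office_fax honeypot was left empty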
| mpl-2.0 |
xiandiancloud/edxplaltfom-xusong | lms/djangoapps/shoppingcart/migrations/0005_auto__add_paidcourseregistrationannotation__add_field_orderitem_report.py | 58 | 9807 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'PaidCourseRegistrationAnnotation'
db.create_table('shoppingcart_paidcourseregistrationannotation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('course_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)),
('annotation', self.gf('django.db.models.fields.TextField')(null=True)),
))
db.send_create_signal('shoppingcart', ['PaidCourseRegistrationAnnotation'])
# Adding field 'OrderItem.report_comments'
db.add_column('shoppingcart_orderitem', 'report_comments',
self.gf('django.db.models.fields.TextField')(default=''),
keep_default=False)
def backwards(self, orm):
# Deleting model 'PaidCourseRegistrationAnnotation'
db.delete_table('shoppingcart_paidcourseregistrationannotation')
# Deleting field 'OrderItem.report_comments'
db.delete_column('shoppingcart_orderitem', 'report_comments')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart'] | agpl-3.0 |
intgr/django | django/conf/locale/nb/formats.py | 65 | 1699 | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06'
# '%d. %b %Y', '%d %b %Y', # '25. okt 2006', '25 okt 2006'
# '%d. %b. %Y', '%d %b. %Y', # '25. okt. 2006', '25 okt. 2006'
# '%d. %B %Y', '%d %B %Y', # '25. oktober 2006', '25 oktober 2006'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause |
SickRage/SickRage | sickrage/libs/trakt/core/emitter.py | 7 | 6294 | from __future__ import absolute_import, division, print_function
import logging
# concurrent.futures is optional
try:
from concurrent.futures import ThreadPoolExecutor
except ImportError:
ThreadPoolExecutor = None
log = logging.getLogger(__name__)
class Emitter(object):
threading = False
threading_workers = 2
__constructed = False
__name = None
__callbacks = None
__threading_pool = None
def __ensure_constructed(self):
if self.__constructed:
return
self.__callbacks = {}
self.__constructed = True
if self.threading:
if ThreadPoolExecutor is None:
raise Exception('concurrent.futures is required for threading')
self.__threading_pool = ThreadPoolExecutor(max_workers=self.threading_workers)
def __log(self, message, *args, **kwargs):
if self.__name is None:
self.__name = '%s.%s' % (
self.__module__,
self.__class__.__name__
)
log.debug(
('[%s]:' % self.__name.ljust(34)) + str(message),
*args, **kwargs
)
def __wrap(self, callback, *args, **kwargs):
def wrap(func):
callback(func=func, *args, **kwargs)
return func
return wrap
def on(self, events, func=None, on_bound=None):
if not func:
# assume decorator, wrap
return self.__wrap(self.on, events, on_bound=on_bound)
if not isinstance(events, (list, tuple)):
events = [events]
self.__log('on(events: %s, func: %s)', repr(events), repr(func))
self.__ensure_constructed()
for event in events:
if event not in self.__callbacks:
self.__callbacks[event] = []
# Bind callback to event
self.__callbacks[event].append(func)
# Call 'on_bound' callback
if on_bound:
self.__call(on_bound, kwargs={
'func': func
})
return self
def once(self, event, func=None):
if not func:
# assume decorator, wrap
return self.__wrap(self.once, event)
self.__log('once(event: %s, func: %s)', repr(event), repr(func))
def once_callback(*args, **kwargs):
self.off(event, once_callback)
func(*args, **kwargs)
self.on(event, once_callback)
return self
def off(self, event=None, func=None):
self.__log('off(event: %s, func: %s)', repr(event), repr(func))
self.__ensure_constructed()
if event and event not in self.__callbacks:
return self
        if event and func and func not in self.__callbacks[event]:
return self
if event and func:
self.__callbacks[event].remove(func)
elif event:
self.__callbacks[event] = []
elif func:
raise ValueError('"event" is required if "func" is specified')
else:
self.__callbacks = {}
return self
def emit(self, event, *args, **kwargs):
suppress = kwargs.pop('__suppress', False)
if not suppress:
self.__log('emit(event: %s, args: %s, kwargs: %s)', repr(event), repr_trim(args), repr_trim(kwargs))
self.__ensure_constructed()
if event not in self.__callbacks:
return
for callback in list(self.__callbacks[event]):
self.__call(callback, args, kwargs, event)
return self
def emit_on(self, event, *args, **kwargs):
func = kwargs.pop('func', None)
if not func:
# assume decorator, wrap
return self.__wrap(self.emit_on, event, *args, **kwargs)
self.__log('emit_on(event: %s, func: %s, args: %s, kwargs: %s)',
repr(event), repr(func), repr(args), repr(kwargs))
# Bind func from wrapper
self.on(event, func)
# Emit event (calling 'func')
self.emit(event, *args, **kwargs)
def pipe(self, events, other):
        if not isinstance(events, (list, tuple)):
events = [events]
self.__log('pipe(events: %s, other: %s)', repr(events), repr(other))
self.__ensure_constructed()
for event in events:
self.on(event, PipeHandler(event, other.emit))
return self
def __call(self, callback, args=None, kwargs=None, event=None):
args = args or ()
kwargs = kwargs or {}
if self.threading:
return self.__call_async(callback, args, kwargs, event)
return self.__call_sync(callback, args, kwargs, event)
@classmethod
def __call_sync(cls, callback, args=None, kwargs=None, event=None):
try:
callback(*args, **kwargs)
return True
except Exception as ex:
log.warn('[%s] Exception raised in: %s - %s' % (event, cls.__function_name(callback), ex), exc_info=True)
return False
def __call_async(self, callback, args=None, kwargs=None, event=None):
self.__threading_pool.submit(self.__call_sync, callback, args, kwargs, event)
@staticmethod
def __function_name(func):
fragments = []
# Try append class name
cls = getattr(func, 'im_class', None)
if cls and hasattr(cls, '__name__'):
fragments.append(cls.__name__)
# Append function name
fragments.append(func.__name__)
return '.'.join(fragments)
class PipeHandler(object):
def __init__(self, event, callback):
self.event = event
self.callback = callback
def __call__(self, *args, **kwargs):
self.callback(self.event, *args, **kwargs)
def on(emitter, event, func=None):
emitter.on(event, func)
return {
'destroy': lambda: emitter.off(event, func)
}
def once(emitter, event, func=None):
return emitter.once(event, func)
def off(emitter, event, func=None):
return emitter.off(event, func)
def emit(emitter, event, *args, **kwargs):
return emitter.emit(event, *args, **kwargs)
def repr_trim(value, length=1000):
value = repr(value)
if len(value) < length:
return value
return '<%s - %s characters>' % (type(value).__name__, len(value))
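if __name__ == '__main__':
    # Minimal usage sketch (illustrative; the class and event names are made up)
    class _Player(Emitter):
        pass

    player = _Player()

    @player.on('pause')
    def handle_pause():
        print('paused')

    player.emit('pause')   # -> prints 'paused'
    player.off('pause')    # removes every 'pause' handler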
| gpl-3.0 |
chaluemwut/fbserver | venv/lib/python2.7/site-packages/sklearn/neighbors/base.py | 1 | 24541 | """Base and mixin classes for nearest neighbors"""
# Authors: Jake Vanderplas <[email protected]>
# Fabian Pedregosa <[email protected]>
# Alexandre Gramfort <[email protected]>
# Sparseness support by Lars Buitinck <[email protected]>
# Multi-output support by Arnaud Joly <[email protected]>
#
# License: BSD 3 clause (C) INRIA, University of Amsterdam
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import csr_matrix, issparse
from .ball_tree import BallTree
from .kd_tree import KDTree
from ..base import BaseEstimator
from ..metrics import pairwise_distances
from ..metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from ..utils import safe_asarray, atleast2d_or_csr, check_arrays
from ..utils.fixes import argpartition
from ..utils.validation import DataConversionWarning
from ..externals import six
VALID_METRICS = dict(ball_tree=BallTree.valid_metrics,
kd_tree=KDTree.valid_metrics,
# The following list comes from the
# sklearn.metrics.pairwise doc string
brute=(list(PAIRWISE_DISTANCE_FUNCTIONS.keys()) +
['braycurtis', 'canberra', 'chebyshev',
'correlation', 'cosine', 'dice', 'hamming',
'jaccard', 'kulsinski', 'mahalanobis',
'matching', 'minkowski', 'rogerstanimoto',
'russellrao', 'seuclidean', 'sokalmichener',
'sokalsneath', 'sqeuclidean',
'yule', 'wminkowski']))
VALID_METRICS_SPARSE = dict(ball_tree=[],
kd_tree=[],
brute=PAIRWISE_DISTANCE_FUNCTIONS.keys())
class NeighborsWarning(UserWarning):
pass
# Make sure that NeighborsWarning are displayed more than once
warnings.simplefilter("always", NeighborsWarning)
def _check_weights(weights):
"""Check to make sure weights are valid"""
if weights in (None, 'uniform', 'distance'):
return weights
elif callable(weights):
return weights
else:
raise ValueError("weights not recognized: should be 'uniform', "
"'distance', or a callable function")
def _get_weights(dist, weights):
"""Get the weights from an array of distances and a parameter ``weights``
Parameters
    ----------
dist: ndarray
The input distances
weights: {'uniform', 'distance' or a callable}
The kind of weighting used
Returns
    -------
weights_arr: array of the same shape as ``dist``
if ``weights == 'uniform'``, then returns None
"""
if weights in (None, 'uniform'):
return None
elif weights == 'distance':
with np.errstate(divide='ignore'):
dist = 1. / dist
return dist
elif callable(weights):
return weights(dist)
else:
raise ValueError("weights not recognized: should be 'uniform', "
"'distance', or a callable function")
class NeighborsBase(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for nearest neighbors estimators."""
@abstractmethod
def __init__(self):
pass
def _init_params(self, n_neighbors=None, radius=None,
algorithm='auto', leaf_size=30, metric='minkowski',
p=2, metric_params=None, **kwargs):
if kwargs:
warnings.warn("Passing additional arguments to the metric "
"function as **kwargs is deprecated "
"and will no longer be supported in 0.18. "
"Use metric_params instead.",
DeprecationWarning, stacklevel=3)
if metric_params is None:
metric_params = {}
metric_params.update(kwargs)
self.n_neighbors = n_neighbors
self.radius = radius
self.algorithm = algorithm
self.leaf_size = leaf_size
self.metric = metric
self.metric_params = metric_params
self.p = p
if algorithm not in ['auto', 'brute',
'kd_tree', 'ball_tree']:
raise ValueError("unrecognized algorithm: '%s'" % algorithm)
if algorithm == 'auto':
alg_check = 'ball_tree'
else:
alg_check = algorithm
if callable(metric):
if algorithm == 'kd_tree':
# callable metric is only valid for brute force and ball_tree
raise ValueError(
"kd_tree algorithm does not support callable metric '%s'"
% metric)
elif metric not in VALID_METRICS[alg_check]:
raise ValueError("Metric '%s' not valid for algorithm '%s'"
% (metric, algorithm))
if self.metric_params is not None and 'p' in self.metric_params:
warnings.warn("Parameter p is found in metric_params. "
"The corresponding parameter from __init__ "
"is ignored.", SyntaxWarning, stacklevel=3)
effective_p = metric_params['p']
else:
effective_p = self.p
if self.metric in ['wminkowski', 'minkowski'] and effective_p < 1:
raise ValueError("p must be greater than one for minkowski metric")
self._fit_X = None
self._tree = None
self._fit_method = None
def _fit(self, X):
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
effective_p = self.effective_metric_params_.get('p', self.p)
if self.metric in ['wminkowski', 'minkowski']:
self.effective_metric_params_['p'] = effective_p
self.effective_metric_ = self.metric
# For minkowski distance, use more efficient methods where available
if self.metric == 'minkowski':
p = self.effective_metric_params_.pop('p', 2)
if p < 1:
raise ValueError("p must be greater than one "
"for minkowski metric")
elif p == 1:
self.effective_metric_ = 'manhattan'
elif p == 2:
self.effective_metric_ = 'euclidean'
elif p == np.inf:
self.effective_metric_ = 'chebyshev'
else:
self.effective_metric_params_['p'] = p
if isinstance(X, NeighborsBase):
self._fit_X = X._fit_X
self._tree = X._tree
self._fit_method = X._fit_method
return self
elif isinstance(X, BallTree):
self._fit_X = X.data
self._tree = X
self._fit_method = 'ball_tree'
return self
elif isinstance(X, KDTree):
self._fit_X = X.data
self._tree = X
self._fit_method = 'kd_tree'
return self
X = atleast2d_or_csr(X, copy=False)
n_samples = X.shape[0]
if n_samples == 0:
raise ValueError("n_samples must be greater than 0")
if issparse(X):
if self.algorithm not in ('auto', 'brute'):
warnings.warn("cannot use tree with sparse input: "
"using brute force")
if self.effective_metric_ not in VALID_METRICS_SPARSE['brute']:
raise ValueError("metric '%s' not valid for sparse input"
% self.effective_metric_)
self._fit_X = X.copy()
self._tree = None
self._fit_method = 'brute'
return self
self._fit_method = self.algorithm
self._fit_X = X
if self._fit_method == 'auto':
# A tree approach is better for small number of neighbors,
# and KDTree is generally faster when available
if (self.n_neighbors is None
or self.n_neighbors < self._fit_X.shape[0] // 2):
if self.effective_metric_ in VALID_METRICS['kd_tree']:
self._fit_method = 'kd_tree'
else:
self._fit_method = 'ball_tree'
else:
self._fit_method = 'brute'
if self._fit_method == 'ball_tree':
self._tree = BallTree(X, self.leaf_size,
metric=self.effective_metric_,
**self.effective_metric_params_)
elif self._fit_method == 'kd_tree':
self._tree = KDTree(X, self.leaf_size,
metric=self.effective_metric_,
**self.effective_metric_params_)
elif self._fit_method == 'brute':
self._tree = None
else:
raise ValueError("algorithm = '%s' not recognized"
% self.algorithm)
return self
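    # Illustrative consequence of the 'auto' rule above (assumed): with the
    # default n_neighbors and a modest dense float array under the euclidean
    # metric, _fit_method resolves to 'kd_tree'; sparse input always falls
    # back to 'brute', as handled earlier in _fit.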
class KNeighborsMixin(object):
"""Mixin for k-neighbors searches"""
def kneighbors(self, X, n_neighbors=None, return_distance=True):
"""Finds the K-neighbors of a point.
Returns distance
Parameters
----------
X : array-like, last dimension same as that of fit data
The new point.
n_neighbors : int
Number of neighbors to get (default is the value
passed to the constructor).
return_distance : boolean, optional. Defaults to True.
If False, distances will not be returned
Returns
-------
dist : array
Array representing the lengths to point, only present if
return_distance=True
ind : array
Indices of the nearest points in the population matrix.
Examples
--------
In the following example, we construct a NeighborsClassifier
class from an array representing our data set and ask who's
the closest point to [1,1,1]
>>> samples = [[0., 0., 0.], [0., .5, 0.], [1., 1., .5]]
>>> from sklearn.neighbors import NearestNeighbors
>>> neigh = NearestNeighbors(n_neighbors=1)
>>> neigh.fit(samples) # doctest: +ELLIPSIS
NearestNeighbors(algorithm='auto', leaf_size=30, ...)
>>> print(neigh.kneighbors([1., 1., 1.])) # doctest: +ELLIPSIS
(array([[ 0.5]]), array([[2]]...))
As you can see, it returns [[0.5]], and [[2]], which means that the
element is at distance 0.5 and is the third element of samples
(indexes start at 0). You can also query for multiple points:
>>> X = [[0., 1., 0.], [1., 0., 1.]]
>>> neigh.kneighbors(X, return_distance=False) # doctest: +ELLIPSIS
array([[1],
[2]]...)
"""
if self._fit_method is None:
raise ValueError("must fit neighbors before querying")
X = atleast2d_or_csr(X)
if n_neighbors is None:
n_neighbors = self.n_neighbors
if self._fit_method == 'brute':
# for efficiency, use squared euclidean distances
if self.effective_metric_ == 'euclidean':
dist = pairwise_distances(X, self._fit_X, 'euclidean',
squared=True)
else:
dist = pairwise_distances(X, self._fit_X,
self.effective_metric_,
**self.effective_metric_params_)
neigh_ind = argpartition(dist, n_neighbors - 1, axis=1)
neigh_ind = neigh_ind[:, :n_neighbors]
# argpartition doesn't guarantee sorted order, so we sort again
j = np.arange(neigh_ind.shape[0])[:, None]
neigh_ind = neigh_ind[j, np.argsort(dist[j, neigh_ind])]
if return_distance:
if self.effective_metric_ == 'euclidean':
return np.sqrt(dist[j, neigh_ind]), neigh_ind
else:
return dist[j, neigh_ind], neigh_ind
else:
return neigh_ind
elif self._fit_method in ['ball_tree', 'kd_tree']:
result = self._tree.query(X, n_neighbors,
return_distance=return_distance)
return result
else:
raise ValueError("internal: _fit_method not recognized")
def kneighbors_graph(self, X, n_neighbors=None,
mode='connectivity'):
"""Computes the (weighted) graph of k-Neighbors for points in X
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Sample data
n_neighbors : int
Number of neighbors for each sample.
(default is value passed to the constructor).
mode : {'connectivity', 'distance'}, optional
Type of returned matrix: 'connectivity' will return the
connectivity matrix with ones and zeros, in 'distance' the
edges are Euclidean distance between points.
Returns
-------
A : sparse matrix in CSR format, shape = [n_samples, n_samples_fit]
n_samples_fit is the number of samples in the fitted data
A[i, j] is assigned the weight of edge that connects i to j.
Examples
--------
>>> X = [[0], [3], [1]]
>>> from sklearn.neighbors import NearestNeighbors
>>> neigh = NearestNeighbors(n_neighbors=2)
>>> neigh.fit(X) # doctest: +ELLIPSIS
NearestNeighbors(algorithm='auto', leaf_size=30, ...)
>>> A = neigh.kneighbors_graph(X)
>>> A.toarray()
array([[ 1., 0., 1.],
[ 0., 1., 1.],
[ 1., 0., 1.]])
See also
--------
NearestNeighbors.radius_neighbors_graph
"""
X = safe_asarray(X)
if n_neighbors is None:
n_neighbors = self.n_neighbors
n_samples1 = X.shape[0]
n_samples2 = self._fit_X.shape[0]
n_nonzero = n_samples1 * n_neighbors
A_indptr = np.arange(0, n_nonzero + 1, n_neighbors)
# construct CSR matrix representation of the k-NN graph
if mode == 'connectivity':
A_data = np.ones((n_samples1, n_neighbors))
A_ind = self.kneighbors(X, n_neighbors, return_distance=False)
elif mode == 'distance':
data, ind = self.kneighbors(X, n_neighbors + 1,
return_distance=True)
A_data, A_ind = data[:, 1:], ind[:, 1:]
else:
raise ValueError(
'Unsupported mode, must be one of "connectivity" '
'or "distance" but got "%s" instead' % mode)
return csr_matrix((A_data.ravel(), A_ind.ravel(), A_indptr),
shape=(n_samples1, n_samples2))
class RadiusNeighborsMixin(object):
"""Mixin for radius-based neighbors searches"""
def radius_neighbors(self, X, radius=None, return_distance=True):
"""Finds the neighbors within a given radius of a point or points.
Returns indices of and distances to the neighbors of each point.
Parameters
----------
X : array-like, last dimension same as that of fit data
The new point or points
radius : float
Limiting distance of neighbors to return.
(default is the value passed to the constructor).
return_distance : boolean, optional. Defaults to True.
If False, distances will not be returned
Returns
-------
dist : array
Array representing the euclidean distances to each point,
only present if return_distance=True.
ind : array
Indices of the nearest points in the population matrix.
Examples
--------
In the following example, we construct a NeighborsClassifier
class from an array representing our data set and ask who's
the closest point to [1,1,1]
>>> samples = [[0., 0., 0.], [0., .5, 0.], [1., 1., .5]]
>>> from sklearn.neighbors import NearestNeighbors
>>> neigh = NearestNeighbors(radius=1.6)
>>> neigh.fit(samples) # doctest: +ELLIPSIS
NearestNeighbors(algorithm='auto', leaf_size=30, ...)
>>> print(neigh.radius_neighbors([1., 1., 1.])) # doctest: +ELLIPSIS
        (array([[ 1.5, 0.5]]...), array([[1, 2]]...))
The first array returned contains the distances to all points which
are closer than 1.6, while the second array returned contains their
indices. In general, multiple points can be queried at the same time.
Notes
-----
Because the number of neighbors of each point is not necessarily
equal, the results for multiple query points cannot be fit in a
standard data array.
For efficiency, `radius_neighbors` returns arrays of objects, where
each object is a 1D array of indices or distances.
"""
if self._fit_method is None:
raise ValueError("must fit neighbors before querying")
X = atleast2d_or_csr(X)
if radius is None:
radius = self.radius
if self._fit_method == 'brute':
# for efficiency, use squared euclidean distances
if self.effective_metric_ == 'euclidean':
dist = pairwise_distances(X, self._fit_X, 'euclidean',
squared=True)
radius *= radius
else:
dist = pairwise_distances(X, self._fit_X,
self.effective_metric_,
**self.effective_metric_params_)
neigh_ind = [np.where(d < radius)[0] for d in dist]
# if there are the same number of neighbors for each point,
# we can do a normal array. Otherwise, we return an object
# array with elements that are numpy arrays
try:
neigh_ind = np.asarray(neigh_ind, dtype=int)
dtype_F = float
except ValueError:
neigh_ind = np.asarray(neigh_ind, dtype='object')
dtype_F = object
if return_distance:
if self.effective_metric_ == 'euclidean':
dist = np.array([np.sqrt(d[neigh_ind[i]])
for i, d in enumerate(dist)],
dtype=dtype_F)
else:
dist = np.array([d[neigh_ind[i]]
for i, d in enumerate(dist)],
dtype=dtype_F)
return dist, neigh_ind
else:
return neigh_ind
elif self._fit_method in ['ball_tree', 'kd_tree']:
results = self._tree.query_radius(X, radius,
return_distance=return_distance)
if return_distance:
ind, dist = results
return dist, ind
else:
return results
else:
raise ValueError("internal: _fit_method not recognized")
def radius_neighbors_graph(self, X, radius=None, mode='connectivity'):
"""Computes the (weighted) graph of Neighbors for points in X
        Neighborhoods are restricted to points at a distance lower than the
radius.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Sample data
radius : float
Radius of neighborhoods.
(default is the value passed to the constructor).
mode : {'connectivity', 'distance'}, optional
Type of returned matrix: 'connectivity' will return the
connectivity matrix with ones and zeros, in 'distance' the
edges are Euclidean distance between points.
Returns
-------
A : sparse matrix in CSR format, shape = [n_samples, n_samples]
A[i, j] is assigned the weight of edge that connects i to j.
Examples
--------
>>> X = [[0], [3], [1]]
>>> from sklearn.neighbors import NearestNeighbors
>>> neigh = NearestNeighbors(radius=1.5)
>>> neigh.fit(X) # doctest: +ELLIPSIS
NearestNeighbors(algorithm='auto', leaf_size=30, ...)
>>> A = neigh.radius_neighbors_graph(X)
>>> A.toarray()
array([[ 1., 0., 1.],
[ 0., 1., 0.],
[ 1., 0., 1.]])
See also
--------
kneighbors_graph
"""
X = safe_asarray(X)
if radius is None:
radius = self.radius
n_samples1 = X.shape[0]
n_samples2 = self._fit_X.shape[0]
# construct CSR matrix representation of the NN graph
if mode == 'connectivity':
A_ind = self.radius_neighbors(X, radius,
return_distance=False)
A_data = None
elif mode == 'distance':
dist, A_ind = self.radius_neighbors(X, radius,
return_distance=True)
A_data = np.concatenate(list(dist))
else:
raise ValueError(
'Unsupported mode, must be one of "connectivity", '
'or "distance" but got %s instead' % mode)
n_neighbors = np.array([len(a) for a in A_ind])
n_nonzero = np.sum(n_neighbors)
if A_data is None:
A_data = np.ones(n_nonzero)
A_ind = np.concatenate(list(A_ind))
A_indptr = np.concatenate((np.zeros(1, dtype=int),
np.cumsum(n_neighbors)))
return csr_matrix((A_data, A_ind, A_indptr),
shape=(n_samples1, n_samples2))
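def _radius_graph_csr_demo():
    """Illustrative sketch, not part of scikit-learn: for variable-length
    neighborhoods the CSR indptr must be a cumulative sum of the per-row
    neighbor counts, exactly as radius_neighbors_graph builds it above.
    The neighborhoods below are hypothetical."""
    import numpy as np
    from scipy.sparse import csr_matrix
    A_ind = [np.array([0, 2]), np.array([1]), np.array([0, 1, 2])]
    n_neighbors = np.array([len(a) for a in A_ind])
    A_data = np.ones(np.sum(n_neighbors))
    A_indptr = np.concatenate((np.zeros(1, dtype=int), np.cumsum(n_neighbors)))
    A = csr_matrix((A_data, np.concatenate(A_ind), A_indptr), shape=(3, 3))
    assert A.nnz == 6  # 2 + 1 + 3 stored edges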
class SupervisedFloatMixin(object):
def fit(self, X, y):
"""Fit the model using X as training data and y as target values
Parameters
----------
X : {array-like, sparse matrix, BallTree, KDTree}
Training data. If array or matrix, shape = [n_samples, n_features]
y : {array-like, sparse matrix}
Target values, array of float values, shape = [n_samples]
or [n_samples, n_outputs]
"""
if not isinstance(X, (KDTree, BallTree)):
X, y = check_arrays(X, y, sparse_format="csr")
self._y = y
return self._fit(X)
class SupervisedIntegerMixin(object):
def fit(self, X, y):
"""Fit the model using X as training data and y as target values
Parameters
----------
X : {array-like, sparse matrix, BallTree, KDTree}
Training data. If array or matrix, shape = [n_samples, n_features]
y : {array-like, sparse matrix}
Target values of shape = [n_samples] or [n_samples, n_outputs]
"""
if not isinstance(X, (KDTree, BallTree)):
X, y = check_arrays(X, y, sparse_format="csr")
if y.ndim == 1 or y.ndim == 2 and y.shape[1] == 1:
if y.ndim != 1:
warnings.warn("A column-vector y was passed when a 1d array "
"was expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning, stacklevel=2)
self.outputs_2d_ = False
y = y.reshape((-1, 1))
else:
self.outputs_2d_ = True
self.classes_ = []
self._y = np.empty(y.shape, dtype=np.int)
for k in range(self._y.shape[1]):
classes, self._y[:, k] = np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes)
if not self.outputs_2d_:
self.classes_ = self.classes_[0]
self._y = self._y.ravel()
return self._fit(X)
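def _label_encoding_demo():
    """Illustrative sketch, not part of scikit-learn: the
    np.unique(..., return_inverse=True) trick used in
    SupervisedIntegerMixin.fit maps arbitrary class labels to contiguous
    integer indices, one encoding per output column."""
    import numpy as np
    y = np.array(['b', 'a', 'b', 'c'])
    classes, encoded = np.unique(y, return_inverse=True)
    # classes == ['a', 'b', 'c'], encoded == [1, 0, 1, 2]
    assert (classes[encoded] == y).all()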
class UnsupervisedMixin(object):
def fit(self, X, y=None):
"""Fit the model using X as training data
Parameters
----------
X : {array-like, sparse matrix, BallTree, KDTree}
Training data. If array or matrix, shape = [n_samples, n_features]
"""
return self._fit(X)
| apache-2.0 |
tedelhourani/ansible | test/units/module_utils/facts/test_collector.py | 13 | 16812 | # This file is part of Ansible
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from collections import defaultdict
# for testing
from ansible.compat.tests import unittest
from ansible.module_utils.facts import collector
from ansible.module_utils.facts import default_collectors
class TestFindCollectorsForPlatform(unittest.TestCase):
def test(self):
compat_platforms = [{'system': 'Generic'}]
res = collector.find_collectors_for_platform(default_collectors.collectors,
compat_platforms)
for coll_class in res:
            self.assertIn(coll_class._platform, ('Generic',))
def test_linux(self):
compat_platforms = [{'system': 'Linux'}]
res = collector.find_collectors_for_platform(default_collectors.collectors,
compat_platforms)
for coll_class in res:
            self.assertIn(coll_class._platform, ('Linux',))
def test_linux_or_generic(self):
compat_platforms = [{'system': 'Generic'}, {'system': 'Linux'}]
res = collector.find_collectors_for_platform(default_collectors.collectors,
compat_platforms)
for coll_class in res:
self.assertIn(coll_class._platform, ('Generic', 'Linux'))
class TestSelectCollectorNames(unittest.TestCase):
def test(self):
collector_names = set(['distribution', 'all_ipv4_addresses',
'local', 'pkg_mgr'])
all_fact_subsets = self._all_fact_subsets()
all_collector_classes = self._all_collector_classes()
res = collector.select_collector_classes(collector_names,
all_fact_subsets,
all_collector_classes)
expected = [default_collectors.DistributionFactCollector,
default_collectors.PkgMgrFactCollector]
self.assertEqual(res, expected)
def test_reverse(self):
collector_names = set(['distribution', 'all_ipv4_addresses',
'local', 'pkg_mgr'])
all_fact_subsets = self._all_fact_subsets()
all_collector_classes = self._all_collector_classes()
all_collector_classes.reverse()
res = collector.select_collector_classes(collector_names,
all_fact_subsets,
all_collector_classes)
expected = [default_collectors.PkgMgrFactCollector,
default_collectors.DistributionFactCollector]
self.assertEqual(res, expected)
def test_default_collectors(self):
platform_info = {'system': 'Generic'}
compat_platforms = [platform_info]
collectors_for_platform = collector.find_collectors_for_platform(default_collectors.collectors,
compat_platforms)
all_fact_subsets, aliases_map = collector.build_fact_id_to_collector_map(collectors_for_platform)
all_valid_subsets = frozenset(all_fact_subsets.keys())
collector_names = collector.get_collector_names(valid_subsets=all_valid_subsets,
aliases_map=aliases_map,
platform_info=platform_info)
collector.select_collector_classes(collector_names,
all_fact_subsets,
default_collectors.collectors)
def _all_collector_classes(self):
return [default_collectors.DistributionFactCollector,
default_collectors.PkgMgrFactCollector,
default_collectors.LinuxNetworkCollector]
def _all_fact_subsets(self, data=None):
all_fact_subsets = defaultdict(list)
_data = {'pkg_mgr': [default_collectors.PkgMgrFactCollector],
'distribution': [default_collectors.DistributionFactCollector],
'network': [default_collectors.LinuxNetworkCollector]}
data = data or _data
for key, value in data.items():
all_fact_subsets[key] = value
return all_fact_subsets
class TestGetCollectorNames(unittest.TestCase):
def test_none(self):
res = collector.get_collector_names()
self.assertIsInstance(res, set)
self.assertEqual(res, set([]))
def test_empty_sets(self):
res = collector.get_collector_names(valid_subsets=frozenset([]),
minimal_gather_subset=frozenset([]),
gather_subset=[])
self.assertIsInstance(res, set)
self.assertEqual(res, set([]))
def test_empty_valid_and_min_with_all_gather_subset(self):
res = collector.get_collector_names(valid_subsets=frozenset([]),
minimal_gather_subset=frozenset([]),
gather_subset=['all'])
self.assertIsInstance(res, set)
self.assertEqual(res, set([]))
def test_one_valid_with_all_gather_subset(self):
valid_subsets = frozenset(['my_fact'])
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=frozenset([]),
gather_subset=['all'])
self.assertIsInstance(res, set)
self.assertEqual(res, set(['my_fact']))
def _compare_res(self, gather_subset1, gather_subset2,
valid_subsets=None, min_subset=None):
valid_subsets = valid_subsets or frozenset()
minimal_gather_subset = min_subset or frozenset()
res1 = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=gather_subset1)
res2 = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=gather_subset2)
return res1, res2
def test_not_all_other_order(self):
valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['min_fact'])
res1, res2 = self._compare_res(['!all', 'whatever'],
['whatever', '!all'],
valid_subsets=valid_subsets,
min_subset=minimal_gather_subset)
self.assertEqual(res1, res2)
self.assertEqual(res1, set(['min_fact', 'whatever']))
def test_not_all_other_order_min(self):
valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['min_fact'])
res1, res2 = self._compare_res(['!min_fact', 'whatever'],
['whatever', '!min_fact'],
valid_subsets=valid_subsets,
min_subset=minimal_gather_subset)
self.assertEqual(res1, res2)
self.assertEqual(res1, set(['whatever']))
def test_one_minimal_with_all_gather_subset(self):
my_fact = 'my_fact'
valid_subsets = frozenset([my_fact])
minimal_gather_subset = valid_subsets
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['all'])
self.assertIsInstance(res, set)
self.assertEqual(res, set(['my_fact']))
def test_with_all_gather_subset(self):
valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['my_fact'])
        # with 'all', every valid subset (minimal included) is returned
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['all'])
self.assertIsInstance(res, set)
self.assertEqual(res, set(['my_fact', 'something_else', 'whatever']))
def test_one_minimal_with_not_all_gather_subset(self):
valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['my_fact'])
# even with '!all', the minimal_gather_subset should be returned
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['!all'])
self.assertIsInstance(res, set)
self.assertEqual(res, set(['my_fact']))
def test_gather_subset_excludes(self):
valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['min_fact', 'min_another'])
        # excluded subsets are dropped, but members of minimal_gather_subset are always kept
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
# gather_subset=set(['all', '!my_fact', '!whatever']))
# gather_subset=['all', '!my_fact', '!whatever'])
gather_subset=['!min_fact', '!whatever'])
self.assertIsInstance(res, set)
# min_another is in minimal_gather_subset, so always returned
self.assertEqual(res, set(['min_another']))
def test_gather_subset_excludes_ordering(self):
valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['my_fact'])
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['!all', 'whatever'])
self.assertIsInstance(res, set)
        # '!all' drops the optional subsets, 'whatever' is explicitly
        # re-included, and minimal_gather_subset is always added back
self.assertEqual(res, set(['my_fact', 'whatever']))
def test_gather_subset_excludes_min(self):
valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['min_fact'])
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['whatever', '!min'])
self.assertIsInstance(res, set)
        # '!min' excludes the minimal subset here, leaving only the
        # explicitly requested 'whatever'
self.assertEqual(res, set(['whatever']))
def test_gather_subset_excludes_min_and_all(self):
valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
minimal_gather_subset = frozenset(['min_fact'])
res = collector.get_collector_names(valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['whatever', '!all', '!min'])
self.assertIsInstance(res, set)
        # '!all' and '!min' together drop everything implicit, leaving
        # only the explicitly requested 'whatever'
self.assertEqual(res, set(['whatever']))
    def test_invalid_gather_subset(self):
valid_subsets = frozenset(['my_fact', 'something_else'])
minimal_gather_subset = frozenset(['my_fact'])
self.assertRaisesRegexp(TypeError,
'Bad subset .* given to Ansible.*allowed\:.*all,.*my_fact.*',
collector.get_collector_names,
valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=['my_fact', 'not_a_valid_gather_subset'])
class TestCollectorClassesFromGatherSubset(unittest.TestCase):
def _classes(self,
all_collector_classes=None,
valid_subsets=None,
minimal_gather_subset=None,
gather_subset=None,
gather_timeout=None):
return collector.collector_classes_from_gather_subset(all_collector_classes=all_collector_classes,
valid_subsets=valid_subsets,
minimal_gather_subset=minimal_gather_subset,
gather_subset=gather_subset,
gather_timeout=gather_timeout)
def test_no_args(self):
res = self._classes()
self.assertIsInstance(res, list)
self.assertEqual(res, [])
def test(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=['!all'])
self.assertIsInstance(res, list)
self.assertEqual(res, [])
def test_env(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=['env'])
self.assertIsInstance(res, list)
self.assertEqual(res, [default_collectors.EnvFactCollector])
def test_facter(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=set(['env', 'facter']))
self.assertIsInstance(res, list)
self.assertEqual(set(res),
set([default_collectors.EnvFactCollector,
default_collectors.FacterFactCollector]))
def test_facter_ohai(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=set(['env', 'facter', 'ohai']))
self.assertIsInstance(res, list)
self.assertEqual(set(res),
set([default_collectors.EnvFactCollector,
default_collectors.FacterFactCollector,
default_collectors.OhaiFactCollector]))
def test_just_facter(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=set(['facter']))
self.assertIsInstance(res, list)
self.assertEqual(set(res),
set([default_collectors.FacterFactCollector]))
def test_collector_specified_multiple_times(self):
res = self._classes(all_collector_classes=default_collectors.collectors,
gather_subset=['platform', 'all', 'machine'])
self.assertIsInstance(res, list)
self.assertIn(default_collectors.PlatformFactCollector,
res)
def test_unknown_collector(self):
# something claims 'unknown_collector' is a valid gather_subset, but there is
# no FactCollector mapped to 'unknown_collector'
self.assertRaisesRegexp(TypeError,
'Bad subset.*unknown_collector.*given to Ansible.*allowed\:.*all,.*env.*',
self._classes,
all_collector_classes=default_collectors.collectors,
gather_subset=['env', 'unknown_collector'])
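def _collector_selection_demo():
    """Illustrative sketch, not part of the suite: the end-to-end call the
    tests above exercise, mirroring test_env. Assumes the default keyword
    arguments of collector_classes_from_gather_subset suffice."""
    classes = collector.collector_classes_from_gather_subset(
        all_collector_classes=default_collectors.collectors,
        gather_subset=['env'])
    assert classes == [default_collectors.EnvFactCollector]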
| gpl-3.0 |
Thraxis/SickRage | lib/github/ContentFile.py | 74 | 6775 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import base64
import sys
import github.GithubObject
import github.Repository
atLeastPython3 = sys.hexversion >= 0x03000000
class ContentFile(github.GithubObject.CompletableGithubObject):
"""
This class represents ContentFiles as returned for example by http://developer.github.com/v3/todo
"""
@property
def content(self):
"""
:type: string
"""
self._completeIfNotSet(self._content)
return self._content.value
@property
def decoded_content(self):
assert self.encoding == "base64", "unsupported encoding: %s" % self.encoding
if atLeastPython3:
content = bytearray(self.content, "utf-8") # pragma no cover (covered by tests with Python 3.2)
else:
content = self.content
return base64.b64decode(content)
@property
def encoding(self):
"""
:type: string
"""
self._completeIfNotSet(self._encoding)
return self._encoding.value
@property
def git_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._git_url)
return self._git_url.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def path(self):
"""
:type: string
"""
self._completeIfNotSet(self._path)
return self._path.value
@property
def repository(self):
"""
:type: :class:`github.Repository.Repository`
"""
if self._repository is github.GithubObject.NotSet:
# The repository was not set automatically, so it must be looked up by url.
repo_url = "/".join(self.url.split("/")[:6]) # pragma no cover (Should be covered)
self._repository = github.GithubObject._ValuedAttribute(github.Repository.Repository(self._requester, self._headers, {'url': repo_url}, completed=False)) # pragma no cover (Should be covered)
return self._repository.value
@property
def sha(self):
"""
:type: string
"""
self._completeIfNotSet(self._sha)
return self._sha.value
@property
def size(self):
"""
:type: integer
"""
self._completeIfNotSet(self._size)
return self._size.value
@property
def type(self):
"""
:type: string
"""
self._completeIfNotSet(self._type)
return self._type.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def _initAttributes(self):
self._content = github.GithubObject.NotSet
self._encoding = github.GithubObject.NotSet
self._git_url = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._path = github.GithubObject.NotSet
self._repository = github.GithubObject.NotSet
self._sha = github.GithubObject.NotSet
self._size = github.GithubObject.NotSet
self._type = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "content" in attributes: # pragma no branch
self._content = self._makeStringAttribute(attributes["content"])
if "encoding" in attributes: # pragma no branch
self._encoding = self._makeStringAttribute(attributes["encoding"])
if "git_url" in attributes: # pragma no branch
self._git_url = self._makeStringAttribute(attributes["git_url"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "path" in attributes: # pragma no branch
self._path = self._makeStringAttribute(attributes["path"])
if "repository" in attributes: # pragma no branch
self._repository = self._makeClassAttribute(github.Repository.Repository, attributes["repository"])
if "sha" in attributes: # pragma no branch
self._sha = self._makeStringAttribute(attributes["sha"])
if "size" in attributes: # pragma no branch
self._size = self._makeIntAttribute(attributes["size"])
if "type" in attributes: # pragma no branch
self._type = self._makeStringAttribute(attributes["type"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
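def _decoded_content_demo():
    """Illustrative sketch, not part of PyGithub: the base64 round-trip
    that the decoded_content property performs on API payloads."""
    payload = base64.b64encode(b"hello")  # what the GitHub API would return
    assert base64.b64decode(payload) == b"hello"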
| gpl-3.0 |
RubenKelevra/rethinkdb | external/v8_3.30.33.16/build/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalizes and sorts an XML file. It also fetches all the properties
inside linked vsprops files and includes them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()
ARGUMENTS = None
class CmpTuple(object):
"""Compare function between 2 tuple."""
def __call__(self, x, y):
return cmp(x[0], y[0])
class CmpNode(object):
"""Compare function between 2 xml nodes."""
def __call__(self, x, y):
def get_string(node):
node_string = "node"
node_string += node.nodeName
if node.nodeValue:
node_string += node.nodeValue
if node.attributes:
# We first sort by name, if present.
node_string += node.getAttribute("Name")
all_nodes = []
for (name, value) in node.attributes.items():
all_nodes.append((name, value))
all_nodes.sort(CmpTuple())
for (name, value) in all_nodes:
node_string += name
node_string += value
return node_string
return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print '%s%s' % (' '*indent, node.data.strip())
return
if node.childNodes:
node.normalize()
# Get the number of attributes
attr_count = 0
if node.attributes:
attr_count = node.attributes.length
# Print the main tag
if attr_count == 0:
print '%s<%s>' % (' '*indent, node.nodeName)
else:
print '%s<%s' % (' '*indent, node.nodeName)
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print '%s %s="%s"' % (' '*indent, name, value)
print '%s>' % (' '*indent)
if node.nodeValue:
print '%s %s' % (' '*indent, node.nodeValue)
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent+2)
print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
"""Returns a list of all the node and sub nodes."""
node_list = []
if (node.attributes and
node.getAttribute('Name') == '_excluded_files'):
# We don't add the "_excluded_files" filter.
return []
for current in node.childNodes:
if current.nodeName == 'Filter':
node_list.extend(FlattenFilter(current))
else:
node_list.append(current)
return node_list
def FixFilenames(filenames, current_directory):
new_list = []
for filename in filenames:
if filename:
for key in REPLACEMENTS:
filename = filename.replace(key, REPLACEMENTS[key])
os.chdir(current_directory)
filename = filename.strip('"\' ')
if filename.startswith('$'):
new_list.append(filename)
else:
new_list.append(os.path.abspath(filename))
return new_list
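def _fix_filenames_demo():
  """Illustrative sketch, not part of gyp: REPLACEMENTS substitution is
  applied first, $-prefixed macros pass through untouched, and empty
  entries are dropped. The '$(Fake)' mapping below is hypothetical."""
  REPLACEMENTS['$(Fake)'] = '/tmp'
  fixed = FixFilenames(['$(Fake)/a.cc', '$(SolutionDir)lib', ''], os.getcwd())
  assert fixed == [os.path.abspath('/tmp/a.cc'), '$(SolutionDir)lib']
  del REPLACEMENTS['$(Fake)']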
def AbsoluteNode(node):
"""Makes all the properties we know about in this node absolute."""
if node.attributes:
for (name, value) in node.attributes.items():
if name in ['InheritedPropertySheets', 'RelativePath',
'AdditionalIncludeDirectories',
'IntermediateDirectory', 'OutputDirectory',
'AdditionalLibraryDirectories']:
# We want to fix up these paths
path_list = value.split(';')
new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
node.setAttribute(name, ';'.join(new_list))
if not value:
node.removeAttribute(name)
def CleanupVcproj(node):
"""For each sub node, we call recursively this function."""
for sub_node in node.childNodes:
AbsoluteNode(sub_node)
CleanupVcproj(sub_node)
  # Normalize the node, and remove all extraneous whitespace.
for sub_node in node.childNodes:
if sub_node.nodeType == Node.TEXT_NODE:
sub_node.data = sub_node.data.replace("\r", "")
sub_node.data = sub_node.data.replace("\n", "")
sub_node.data = sub_node.data.rstrip()
  # Sort all the semicolon-separated attributes and remove duplicates.
if node.attributes:
for (name, value) in node.attributes.items():
sorted_list = sorted(value.split(';'))
unique_list = []
for i in sorted_list:
if not unique_list.count(i):
unique_list.append(i)
node.setAttribute(name, ';'.join(unique_list))
if not value:
node.removeAttribute(name)
if node.childNodes:
node.normalize()
# For each node, take a copy, and remove it from the list.
node_array = []
while node.childNodes and node.childNodes[0]:
# Take a copy of the node and remove it from the list.
current = node.childNodes[0]
node.removeChild(current)
# If the child is a filter, we want to append all its children
# to this same list.
if current.nodeName == 'Filter':
node_array.extend(FlattenFilter(current))
else:
node_array.append(current)
# Sort the list.
node_array.sort(CmpNode())
# Insert the nodes in the correct order.
for new_node in node_array:
    # But don't append an empty Tool node.
if new_node.nodeName == 'Tool':
if new_node.attributes and new_node.attributes.length == 1:
# This one was empty.
continue
if new_node.nodeName == 'UserMacro':
continue
node.appendChild(new_node)
def GetConfigurationNodes(vcproj):
  # TODO(nsylvain): Find a better way to navigate the xml.
nodes = []
for node in vcproj.childNodes:
if node.nodeName == "Configurations":
for sub_node in node.childNodes:
if sub_node.nodeName == "Configuration":
nodes.append(sub_node)
return nodes
def GetChildrenVsprops(filename):
dom = parse(filename)
if dom.documentElement.attributes:
vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
return []
def SeekToNode(node1, child2):
# A text node does not have properties.
if child2.nodeType == Node.TEXT_NODE:
return None
# Get the name of the current node.
current_name = child2.getAttribute("Name")
if not current_name:
# There is no name. We don't know how to merge.
return None
# Look through all the nodes to find a match.
for sub_node in node1.childNodes:
if sub_node.nodeName == child2.nodeName:
name = sub_node.getAttribute("Name")
if name == current_name:
return sub_node
# No match. We give up.
return None
def MergeAttributes(node1, node2):
# No attributes to merge?
if not node2.attributes:
return
for (name, value2) in node2.attributes.items():
# Don't merge the 'Name' attribute.
if name == 'Name':
continue
value1 = node1.getAttribute(name)
if value1:
# The attribute exist in the main node. If it's equal, we leave it
# untouched, otherwise we concatenate it.
if value1 != value2:
node1.setAttribute(name, ';'.join([value1, value2]))
else:
      # The attribute does not exist in the main node. We append this one.
node1.setAttribute(name, value2)
    # If the attribute was a property-sheet attribute, we remove it, since
    # it is useless here.
if name == 'InheritedPropertySheets':
node1.removeAttribute(name)
def MergeProperties(node1, node2):
MergeAttributes(node1, node2)
for child2 in node2.childNodes:
child1 = SeekToNode(node1, child2)
if child1:
MergeProperties(child1, child2)
else:
node1.appendChild(child2.cloneNode(True))
def main(argv):
"""Main function of this vcproj prettifier."""
global ARGUMENTS
ARGUMENTS = argv
  # Check that we received at least the vcproj path.
if len(argv) < 2:
print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0])
return 1
# Parse the keys
for i in range(2, len(argv)):
(key, value) = argv[i].split('=')
REPLACEMENTS[key] = value
# Open the vcproj and parse the xml.
dom = parse(argv[1])
  # First thing we need to do is find the Configuration nodes and merge them
  # with the vsprops they include.
  for configuration_node in GetConfigurationNodes(dom.documentElement):
# Get the property sheets associated with this configuration.
vsprops = configuration_node.getAttribute('InheritedPropertySheets')
# Fix the filenames to be absolute.
vsprops_list = FixFilenames(vsprops.strip().split(';'),
os.path.dirname(argv[1]))
# Extend the list of vsprops with all vsprops contained in the current
# vsprops.
for current_vsprops in vsprops_list:
vsprops_list.extend(GetChildrenVsprops(current_vsprops))
# Now that we have all the vsprops, we need to merge them.
for current_vsprops in vsprops_list:
MergeProperties(configuration_node,
parse(current_vsprops).documentElement)
# Now that everything is merged, we need to cleanup the xml.
CleanupVcproj(dom.documentElement)
  # Finally, we pretty-print the resulting vcproj back to the
  # user.
#print dom.toprettyxml(newl="\n")
PrettyPrintNode(dom.documentElement)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| agpl-3.0 |
sebdelsol/pyload | module/plugins/hoster/RapidshareCom.py | 1 | 7801 | # -*- coding: utf-8 -*-
import re
from module.network.RequestFactory import getURL
from module.plugins.Hoster import Hoster
def getInfo(urls):
ids = ""
names = ""
p = re.compile(RapidshareCom.__pattern__)
for url in urls:
r = p.search(url)
if r.group("name"):
ids += "," + r.group("id")
names += "," + r.group("name")
elif r.group("name_new"):
ids += "," + r.group("id_new")
names += "," + r.group("name_new")
url = "http://api.rapidshare.com/cgi-bin/rsapi.cgi?sub=checkfiles&files=%s&filenames=%s" % (ids[1:], names[1:])
api = getURL(url)
result = []
i = 0
for res in api.split():
tmp = res.split(",")
if tmp[4] in ("0", "4", "5"):
status = 1
elif tmp[4] == "1":
status = 2
else:
status = 3
result.append((tmp[1], tmp[2], status, urls[i]))
i += 1
yield result
class RapidshareCom(Hoster):
__name__ = "RapidshareCom"
__type__ = "hoster"
__version__ = "1.40"
__pattern__ = r'https?://(?:www\.)?rapidshare\.com/(?:files/(?P<id>\d+)/(?P<name>[^?]+)|#!download\|(?:\w+)\|(?P<id_new>\d+)\|(?P<name_new>[^|]+))'
__description__ = """Rapidshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("spoob", "[email protected]"),
("RaNaN", "[email protected]"),
("mkaay", "[email protected]")]
def setup(self):
self.no_download = True
self.api_data = None
self.offset = 0
self.dl_dict = {}
self.id = None
self.name = None
self.chunkLimit = -1 if self.premium else 1
self.multiDL = self.resumeDownload = self.premium
def process(self, pyfile):
self.url = pyfile.url
self.prepare()
def prepare(self):
m = re.match(self.__pattern__, self.url)
if m.group("name"):
self.id = m.group("id")
self.name = m.group("name")
else:
self.id = m.group("id_new")
self.name = m.group("name_new")
self.download_api_data()
if self.api_data['status'] == "1":
self.pyfile.name = self.get_file_name()
if self.premium:
self.handlePremium()
else:
self.handleFree()
elif self.api_data['status'] == "2":
self.logInfo(_("Rapidshare: Traffic Share (direct download)"))
self.pyfile.name = self.get_file_name()
self.download(self.pyfile.url, get={"directstart": 1})
elif self.api_data['status'] in ("0", "4", "5"):
self.offline()
elif self.api_data['status'] == "3":
self.tempOffline()
else:
self.error(_("Unknown response code"))
def handleFree(self):
while self.no_download:
self.dl_dict = self.freeWait()
#tmp = "#!download|%(server)s|%(id)s|%(name)s|%(size)s"
download = "http://%(host)s/cgi-bin/rsapi.cgi?sub=download&editparentlocation=0&bin=1&fileid=%(id)s&filename=%(name)s&dlauth=%(auth)s" % self.dl_dict
self.logDebug("RS API Request: %s" % download)
self.download(download, ref=False)
check = self.checkDownload({"ip": "You need RapidPro to download more files from your IP address",
"auth": "Download auth invalid"})
if check == "ip":
self.setWait(60)
self.logInfo(_("Already downloading from this ip address, waiting 60 seconds"))
self.wait()
self.handleFree()
elif check == "auth":
self.logInfo(_("Invalid Auth Code, download will be restarted"))
self.offset += 5
self.handleFree()
def handlePremium(self):
info = self.account.getAccountInfo(self.user, True)
self.logDebug("Use Premium Account")
url = self.api_data['mirror']
self.download(url, get={"directstart": 1})
def download_api_data(self, force=False):
"""
http://images.rapidshare.com/apidoc.txt
"""
if self.api_data and not force:
return
api_url_base = "http://api.rapidshare.com/cgi-bin/rsapi.cgi"
api_param_file = {"sub": "checkfiles", "incmd5": "1", "files": self.id, "filenames": self.name}
html = self.load(api_url_base, cookies=False, get=api_param_file).strip()
self.logDebug("RS INFO API: %s" % html)
if html.startswith("ERROR"):
return
fields = html.split(",")
# status codes:
# 0=File not found
# 1=File OK (Anonymous downloading)
# 3=Server down
# 4=File marked as illegal
# 5=Anonymous file locked, because it has more than 10 downloads already
# 50+n=File OK (TrafficShare direct download type "n" without any logging.)
# 100+n=File OK (TrafficShare direct download type "n" with logging.
# Read our privacy policy to see what is logged.)
self.api_data = {"fileid": fields[0], "filename": fields[1], "size": int(fields[2]), "serverid": fields[3],
"status": fields[4], "shorthost": fields[5], "checksum": fields[6].strip().lower()}
if int(self.api_data['status']) > 100:
self.api_data['status'] = str(int(self.api_data['status']) - 100)
elif int(self.api_data['status']) > 50:
self.api_data['status'] = str(int(self.api_data['status']) - 50)
self.api_data['mirror'] = "http://rs%(serverid)s%(shorthost)s.rapidshare.com/files/%(fileid)s/%(filename)s" % self.api_data
def freeWait(self):
"""downloads html with the important information
"""
self.no_download = True
id = self.id
name = self.name
prepare = "https://api.rapidshare.com/cgi-bin/rsapi.cgi?sub=download&fileid=%(id)s&filename=%(name)s&try=1&cbf=RSAPIDispatcher&cbid=1" % {
"name": name, "id": id}
self.logDebug("RS API Request: %s" % prepare)
result = self.load(prepare, ref=False)
self.logDebug("RS API Result: %s" % result)
        between_wait = re.search(r"You need to wait (\d+) seconds", result)
if "You need RapidPro to download more files from your IP address" in result:
self.setWait(60)
self.logInfo(_("Already downloading from this ip address, waiting 60 seconds"))
self.wait()
elif ("Too many users downloading from this server right now" in result or
"All free download slots are full" in result):
self.setWait(120)
self.logInfo(_("RapidShareCom: No free slots"))
self.wait()
elif "This file is too big to download it for free" in result:
self.fail(_("You need a premium account for this file"))
elif "Filename invalid." in result:
self.fail(_("Filename reported invalid"))
elif between_wait:
self.setWait(int(between_wait.group(1)), True)
self.wait()
else:
self.no_download = False
tmp, info = result.split(":")
data = info.split(",")
dl_dict = {"id": id,
"name": name,
"host": data[0],
"auth": data[1],
"server": self.api_data['serverid'],
"size": self.api_data['size']}
self.setWait(int(data[2]) + 2 + self.offset)
self.wait()
return dl_dict
def get_file_name(self):
if self.api_data['filename']:
return self.api_data['filename']
return self.url.split("/")[-1]
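def _status_demo():
    """Illustrative sketch, not part of the plugin: the TrafficShare
    status normalization from download_api_data() folds 100+n and 50+n
    back to the base status n."""
    for raw, expected in ((151, "1"), (104, "4"), (53, "3"), (1, "1")):
        status = raw
        if status > 100:
            status -= 100
        elif status > 50:
            status -= 50
        assert str(status) == expected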
| gpl-3.0 |
PyMNtos/stacks | stacks/library/migrations/0003_auto__add_field_author_uuid.py | 1 | 2747 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field authors on 'Book'
m2m_table_name = db.shorten_name(u'library_book_authors')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('book', models.ForeignKey(orm[u'library.book'], null=False)),
('author', models.ForeignKey(orm[u'library.author'], null=False))
))
db.create_unique(m2m_table_name, ['book_id', 'author_id'])
# Adding field 'Author.uuid'
db.add_column(u'library_author', 'uuid',
self.gf('django.db.models.fields.CharField')(default='None', max_length=36, db_index=True),
keep_default=False)
def backwards(self, orm):
# Removing M2M table for field authors on 'Book'
db.delete_table(db.shorten_name(u'library_book_authors'))
# Deleting field 'Author.uuid'
db.delete_column(u'library_author', 'uuid')
models = {
u'library.author': {
'Meta': {'object_name': 'Author'},
'firstname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '36', 'db_index': 'True'})
},
u'library.book': {
'Meta': {'object_name': 'Book'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['library.Author']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn10': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
'isbn13': ('django.db.models.fields.CharField', [], {'max_length': '13', 'null': 'True'}),
'publish_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '36', 'db_index': 'True'})
}
}
complete_apps = ['library'] | gpl-2.0 |
Jordonbc/GlassOS | Glass_OS/build/lib/GlassOS/libaries/requests/packages/urllib3/connection.py | 196 | 10286 | from __future__ import absolute_import
import datetime
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException # noqa: unused in this module
except ImportError:
from httplib import HTTPConnection as _HTTPConnection
from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
from .util import connection
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e)
return conn
def _prepare_conn(self, conn):
self.sock = conn
# the _tunnel_host attribute was added in python 2.6.3 (via
# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
# not have them.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
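def _socket_options_demo():
    """Illustrative sketch, not part of urllib3: enabling TCP keep-alive
    on top of the defaults, as the HTTPConnection docstring above
    suggests. No connection is opened here."""
    options = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
    ]
    conn = HTTPConnection('localhost', port=80, socket_options=options)
    return conn.socket_options  # applied to each new socket in _new_conn()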
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None,
ca_cert_dir=None):
if (ca_certs or ca_cert_dir) and cert_reqs is None:
cert_reqs = 'CERT_REQUIRED'
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
server_hostname=hostname,
ssl_version=resolved_ssl_version)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
if not cert.get('subjectAltName', ()):
warnings.warn((
'Certificate for {0} has no `subjectAltName`, falling back to check for a '
'`commonName` for now. This feature is being removed by major browsers and '
'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
# In case the hostname is an IPv6 address, strip the square
# brackets from it before using it to validate. This is because
# a certificate with an IPv6 address in it won't have square
# brackets around that address. Sadly, match_hostname won't do this
# for us: it expects the plain host part without any extra work
# that might have been done to make it palatable to httplib.
asserted_hostname = self.assert_hostname or hostname
asserted_hostname = asserted_hostname.strip('[]')
match_hostname(cert, asserted_hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
self.assert_fingerprint is not None)
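def _verified_connection_demo():
    """Illustrative sketch, not part of urllib3: connection pools call
    set_cert() before connect() to turn on verification. The CA bundle
    path below is hypothetical."""
    conn = VerifiedHTTPSConnection('example.com', 443)
    conn.set_cert(cert_reqs='CERT_REQUIRED',
                  ca_certs='/path/to/ca-bundle.crt')
    return conn.cert_reqs  # resolved to an ssl constant during connect()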
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection
| mit |
lamby/redis-py | benchmarks/command_packer_benchmark.py | 49 | 3338 | import socket
import sys
from redis.connection import (Connection, SYM_STAR, SYM_DOLLAR, SYM_EMPTY,
SYM_CRLF, b)
from redis._compat import imap
from base import Benchmark
class StringJoiningConnection(Connection):
def send_packed_command(self, command):
"Send an already packed command to the Redis server"
if not self._sock:
self.connect()
try:
self._sock.sendall(command)
except socket.error:
e = sys.exc_info()[1]
self.disconnect()
if len(e.args) == 1:
_errno, errmsg = 'UNKNOWN', e.args[0]
else:
_errno, errmsg = e.args
raise ConnectionError("Error %s while writing to socket. %s." %
(_errno, errmsg))
except:
self.disconnect()
raise
def pack_command(self, *args):
"Pack a series of arguments into a value Redis command"
args_output = SYM_EMPTY.join([
SYM_EMPTY.join((SYM_DOLLAR, b(str(len(k))), SYM_CRLF, k, SYM_CRLF))
for k in imap(self.encode, args)])
output = SYM_EMPTY.join(
(SYM_STAR, b(str(len(args))), SYM_CRLF, args_output))
return output
class ListJoiningConnection(Connection):
def send_packed_command(self, command):
if not self._sock:
self.connect()
try:
if isinstance(command, str):
command = [command]
for item in command:
self._sock.sendall(item)
except socket.error:
e = sys.exc_info()[1]
self.disconnect()
if len(e.args) == 1:
_errno, errmsg = 'UNKNOWN', e.args[0]
else:
_errno, errmsg = e.args
raise ConnectionError("Error %s while writing to socket. %s." %
(_errno, errmsg))
except:
self.disconnect()
raise
def pack_command(self, *args):
output = []
buff = SYM_EMPTY.join(
(SYM_STAR, b(str(len(args))), SYM_CRLF))
for k in imap(self.encode, args):
if len(buff) > 6000 or len(k) > 6000:
buff = SYM_EMPTY.join(
(buff, SYM_DOLLAR, b(str(len(k))), SYM_CRLF))
output.append(buff)
output.append(k)
buff = SYM_CRLF
else:
buff = SYM_EMPTY.join((buff, SYM_DOLLAR, b(str(len(k))),
SYM_CRLF, k, SYM_CRLF))
output.append(buff)
return output
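def _pack_strategies_demo():
    """Illustrative sketch, not part of the benchmark: for small values
    the list-based packer degenerates to a single buffer, so joining its
    chunks reproduces the string-based output. No server connection is
    made; pack_command only encodes."""
    s_conn = StringJoiningConnection(host='localhost', port=6379)
    l_conn = ListJoiningConnection(host='localhost', port=6379)
    joined = s_conn.pack_command('SET', 'key', 'value')
    chunks = l_conn.pack_command('SET', 'key', 'value')
    assert joined == SYM_EMPTY.join(chunks)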
class CommandPackerBenchmark(Benchmark):
ARGUMENTS = (
{
'name': 'connection_class',
'values': [StringJoiningConnection, ListJoiningConnection]
},
{
'name': 'value_size',
'values': [10, 100, 1000, 10000, 100000, 1000000, 10000000,
100000000]
},
)
def setup(self, connection_class, value_size):
self.get_client(connection_class=connection_class)
def run(self, connection_class, value_size):
r = self.get_client()
x = 'a' * value_size
r.set('benchmark', x)
if __name__ == '__main__':
CommandPackerBenchmark().run_benchmark()
| mit |
sindhus/hasjob | hasjob/views/login.py | 4 | 1781 | # -*- coding: utf-8 -*-
from sqlalchemy.exc import IntegrityError
from flask import g, Response, redirect, flash
from flask.ext.lastuser import signal_user_session_refreshed
from coaster.views import get_next_url
from baseframe import csrf
from .. import app, lastuser
from ..signals import signal_login, signal_logout
from ..models import db, UserActiveAt
@app.route('/500')
def error500():
raise Exception("Something b0rked")
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email/* phone/* organizations/* teams/* notice/*'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
signal_logout.send(app, user=g.user)
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
signal_login.send(app, user=g.user)
db.session.commit()
return redirect(get_next_url())
@csrf.exempt
@app.route('/login/notify', methods=['POST'])
@lastuser.notification_handler
def lastusernotify(user):
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
@signal_user_session_refreshed.connect
def track_user(user):
db.session.add(UserActiveAt(user=user, board=g.board))
try:
db.session.commit()
except IntegrityError: # Small but not impossible chance we got two parallel signals
db.session.rollback()
| agpl-3.0 |
RockySteveJobs/python-for-android | python-modules/twisted/twisted/test/test_hook.py | 81 | 4290 |
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.hook module.
"""
from twisted.python import hook
from twisted.trial import unittest
class BaseClass:
"""
dummy class to help in testing.
"""
def __init__(self):
"""
dummy initializer
"""
self.calledBasePre = 0
self.calledBasePost = 0
self.calledBase = 0
def func(self, a, b):
"""
dummy method
"""
assert a == 1
assert b == 2
self.calledBase = self.calledBase + 1
class SubClass(BaseClass):
"""
another dummy class
"""
def __init__(self):
"""
another dummy initializer
"""
BaseClass.__init__(self)
self.calledSubPre = 0
self.calledSubPost = 0
self.calledSub = 0
def func(self, a, b):
"""
another dummy function
"""
assert a == 1
assert b == 2
BaseClass.func(self, a, b)
self.calledSub = self.calledSub + 1
_clean_BaseClass = BaseClass.__dict__.copy()
_clean_SubClass = SubClass.__dict__.copy()
def basePre(base, a, b):
"""
a pre-hook for the base class
"""
base.calledBasePre = base.calledBasePre + 1
def basePost(base, a, b):
"""
a post-hook for the base class
"""
base.calledBasePost = base.calledBasePost + 1
def subPre(sub, a, b):
"""
a pre-hook for the subclass
"""
sub.calledSubPre = sub.calledSubPre + 1
def subPost(sub, a, b):
"""
a post-hook for the subclass
"""
sub.calledSubPost = sub.calledSubPost + 1
class HookTestCase(unittest.TestCase):
"""
test case to make sure hooks are called
"""
def setUp(self):
"""Make sure we have clean versions of our classes."""
BaseClass.__dict__.clear()
BaseClass.__dict__.update(_clean_BaseClass)
SubClass.__dict__.clear()
SubClass.__dict__.update(_clean_SubClass)
def testBaseHook(self):
"""make sure that the base class's hook is called reliably
"""
base = BaseClass()
self.assertEquals(base.calledBase, 0)
self.assertEquals(base.calledBasePre, 0)
base.func(1,2)
self.assertEquals(base.calledBase, 1)
self.assertEquals(base.calledBasePre, 0)
hook.addPre(BaseClass, "func", basePre)
base.func(1, b=2)
self.assertEquals(base.calledBase, 2)
self.assertEquals(base.calledBasePre, 1)
hook.addPost(BaseClass, "func", basePost)
base.func(1, b=2)
self.assertEquals(base.calledBasePost, 1)
self.assertEquals(base.calledBase, 3)
self.assertEquals(base.calledBasePre, 2)
hook.removePre(BaseClass, "func", basePre)
hook.removePost(BaseClass, "func", basePost)
base.func(1, b=2)
self.assertEquals(base.calledBasePost, 1)
self.assertEquals(base.calledBase, 4)
self.assertEquals(base.calledBasePre, 2)
def testSubHook(self):
"""test interactions between base-class hooks and subclass hooks
"""
sub = SubClass()
self.assertEquals(sub.calledSub, 0)
self.assertEquals(sub.calledBase, 0)
sub.func(1, b=2)
self.assertEquals(sub.calledSub, 1)
self.assertEquals(sub.calledBase, 1)
hook.addPre(SubClass, 'func', subPre)
self.assertEquals(sub.calledSub, 1)
self.assertEquals(sub.calledBase, 1)
self.assertEquals(sub.calledSubPre, 0)
self.assertEquals(sub.calledBasePre, 0)
sub.func(1, b=2)
self.assertEquals(sub.calledSub, 2)
self.assertEquals(sub.calledBase, 2)
self.assertEquals(sub.calledSubPre, 1)
self.assertEquals(sub.calledBasePre, 0)
# let the pain begin
hook.addPre(BaseClass, 'func', basePre)
BaseClass.func(sub, 1, b=2)
# sub.func(1, b=2)
self.assertEquals(sub.calledBase, 3)
self.assertEquals(sub.calledBasePre, 1, str(sub.calledBasePre))
sub.func(1, b=2)
self.assertEquals(sub.calledBasePre, 2)
self.assertEquals(sub.calledBase, 4)
self.assertEquals(sub.calledSubPre, 2)
self.assertEquals(sub.calledSub, 3)
testCases = [HookTestCase]
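def _hookOrderDemo():
    """Illustrative sketch, not part of the suite: pre-hooks fire before
    the wrapped method and post-hooks after it, with the same arguments,
    as the test cases above verify. Hooks are removed again so BaseClass
    is left unchanged."""
    base = BaseClass()
    hook.addPre(BaseClass, "func", basePre)
    hook.addPost(BaseClass, "func", basePost)
    base.func(1, 2)
    hook.removePre(BaseClass, "func", basePre)
    hook.removePost(BaseClass, "func", basePost)
    return base.calledBasePre, base.calledBase, base.calledBasePost  # (1, 1, 1)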
| apache-2.0 |
devGregA/code | build/lib.linux-x86_64-2.7/scrapy/tests/test_http_request.py | 33 | 30578 | import cgi
import unittest
from six.moves import xmlrpc_client as xmlrpclib
from six.moves.urllib.parse import urlparse
from scrapy.http import Request, FormRequest, XmlRpcRequest, Headers, HtmlResponse
class RequestTest(unittest.TestCase):
request_class = Request
default_method = 'GET'
default_headers = {}
default_meta = {}
def test_init(self):
# Request requires url in the constructor
self.assertRaises(Exception, self.request_class)
# url argument must be basestring
self.assertRaises(TypeError, self.request_class, 123)
r = self.request_class('http://www.example.com')
r = self.request_class("http://www.example.com")
assert isinstance(r.url, str)
self.assertEqual(r.url, "http://www.example.com")
self.assertEqual(r.method, self.default_method)
assert isinstance(r.headers, Headers)
self.assertEqual(r.headers, self.default_headers)
self.assertEqual(r.meta, self.default_meta)
meta = {"lala": "lolo"}
headers = {"caca": "coco"}
r = self.request_class("http://www.example.com", meta=meta, headers=headers, body="a body")
assert r.meta is not meta
self.assertEqual(r.meta, meta)
assert r.headers is not headers
self.assertEqual(r.headers["caca"], "coco")
def test_url_no_scheme(self):
self.assertRaises(ValueError, self.request_class, 'foo')
def test_headers(self):
# Different ways of setting headers attribute
url = 'http://www.scrapy.org'
headers = {'Accept':'gzip', 'Custom-Header':'nothing to tell you'}
r = self.request_class(url=url, headers=headers)
p = self.request_class(url=url, headers=r.headers)
self.assertEqual(r.headers, p.headers)
self.assertFalse(r.headers is headers)
self.assertFalse(p.headers is r.headers)
# headers must not be unicode
h = Headers({'key1': u'val1', u'key2': 'val2'})
h[u'newkey'] = u'newval'
for k, v in h.iteritems():
self.assert_(isinstance(k, str))
for s in v:
self.assert_(isinstance(s, str))
def test_eq(self):
url = 'http://www.scrapy.org'
r1 = self.request_class(url=url)
r2 = self.request_class(url=url)
self.assertNotEqual(r1, r2)
set_ = set()
set_.add(r1)
set_.add(r2)
self.assertEqual(len(set_), 2)
def test_url(self):
"""Request url tests"""
r = self.request_class(url="http://www.scrapy.org/path")
self.assertEqual(r.url, "http://www.scrapy.org/path")
# url quoting on creation
r = self.request_class(url="http://www.scrapy.org/blank%20space")
self.assertEqual(r.url, "http://www.scrapy.org/blank%20space")
r = self.request_class(url="http://www.scrapy.org/blank space")
self.assertEqual(r.url, "http://www.scrapy.org/blank%20space")
# url encoding
r1 = self.request_class(url=u"http://www.scrapy.org/price/\xa3", encoding="utf-8")
r2 = self.request_class(url=u"http://www.scrapy.org/price/\xa3", encoding="latin1")
self.assertEqual(r1.url, "http://www.scrapy.org/price/%C2%A3")
self.assertEqual(r2.url, "http://www.scrapy.org/price/%A3")
def test_body(self):
r1 = self.request_class(url="http://www.example.com/")
assert r1.body == ''
r2 = self.request_class(url="http://www.example.com/", body="")
assert isinstance(r2.body, str)
self.assertEqual(r2.encoding, 'utf-8') # default encoding
r3 = self.request_class(url="http://www.example.com/", body=u"Price: \xa3100", encoding='utf-8')
assert isinstance(r3.body, str)
self.assertEqual(r3.body, "Price: \xc2\xa3100")
r4 = self.request_class(url="http://www.example.com/", body=u"Price: \xa3100", encoding='latin1')
assert isinstance(r4.body, str)
self.assertEqual(r4.body, "Price: \xa3100")
def test_ajax_url(self):
# ascii url
r = self.request_class(url="http://www.example.com/ajax.html#!key=value")
self.assertEqual(r.url, "http://www.example.com/ajax.html?_escaped_fragment_=key%3Dvalue")
# unicode url
r = self.request_class(url=u"http://www.example.com/ajax.html#!key=value")
self.assertEqual(r.url, "http://www.example.com/ajax.html?_escaped_fragment_=key%3Dvalue")
def test_copy(self):
"""Test Request copy"""
def somecallback():
pass
r1 = self.request_class("http://www.example.com", callback=somecallback, errback=somecallback)
r1.meta['foo'] = 'bar'
r2 = r1.copy()
        # make sure the copy shares the original request's callbacks
assert r1.callback is somecallback
assert r1.errback is somecallback
assert r2.callback is r1.callback
        assert r2.errback is r1.errback
# make sure meta dict is shallow copied
assert r1.meta is not r2.meta, "meta must be a shallow copy, not identical"
self.assertEqual(r1.meta, r2.meta)
# make sure headers attribute is shallow copied
assert r1.headers is not r2.headers, "headers must be a shallow copy, not identical"
self.assertEqual(r1.headers, r2.headers)
self.assertEqual(r1.encoding, r2.encoding)
self.assertEqual(r1.dont_filter, r2.dont_filter)
# Request.body can be identical since it's an immutable object (str)
def test_copy_inherited_classes(self):
"""Test Request children copies preserve their class"""
class CustomRequest(self.request_class):
pass
r1 = CustomRequest('http://www.example.com')
r2 = r1.copy()
assert type(r2) is CustomRequest
def test_replace(self):
"""Test Request.replace() method"""
r1 = self.request_class("http://www.example.com", method='GET')
hdrs = Headers(dict(r1.headers, key='value'))
r2 = r1.replace(method="POST", body="New body", headers=hdrs)
self.assertEqual(r1.url, r2.url)
self.assertEqual((r1.method, r2.method), ("GET", "POST"))
self.assertEqual((r1.body, r2.body), ('', "New body"))
self.assertEqual((r1.headers, r2.headers), (self.default_headers, hdrs))
# Empty attributes (which may fail if not compared properly)
r3 = self.request_class("http://www.example.com", meta={'a': 1}, dont_filter=True)
r4 = r3.replace(url="http://www.example.com/2", body='', meta={}, dont_filter=False)
self.assertEqual(r4.url, "http://www.example.com/2")
self.assertEqual(r4.body, '')
self.assertEqual(r4.meta, {})
assert r4.dont_filter is False
def test_method_always_str(self):
r = self.request_class("http://www.example.com", method=u"POST")
assert isinstance(r.method, str)
def test_immutable_attributes(self):
r = self.request_class("http://example.com")
self.assertRaises(AttributeError, setattr, r, 'url', 'http://example2.com')
self.assertRaises(AttributeError, setattr, r, 'body', 'xxx')
class FormRequestTest(RequestTest):
request_class = FormRequest
def assertSortedEqual(self, first, second, msg=None):
return self.assertEqual(sorted(first), sorted(second), msg)
def test_empty_formdata(self):
r1 = self.request_class("http://www.example.com", formdata={})
self.assertEqual(r1.body, '')
def test_default_encoding(self):
# using default encoding (utf-8)
data = {'one': 'two', 'price': '\xc2\xa3 100'}
r2 = self.request_class("http://www.example.com", formdata=data)
self.assertEqual(r2.method, 'POST')
self.assertEqual(r2.encoding, 'utf-8')
self.assertSortedEqual(r2.body.split('&'),
'price=%C2%A3+100&one=two'.split('&'))
self.assertEqual(r2.headers['Content-Type'], 'application/x-www-form-urlencoded')
def test_custom_encoding(self):
data = {'price': u'\xa3 100'}
r3 = self.request_class("http://www.example.com", formdata=data, encoding='latin1')
self.assertEqual(r3.encoding, 'latin1')
self.assertEqual(r3.body, 'price=%A3+100')
def test_multi_key_values(self):
# using multiples values for a single key
data = {'price': u'\xa3 100', 'colours': ['red', 'blue', 'green']}
r3 = self.request_class("http://www.example.com", formdata=data)
self.assertSortedEqual(r3.body.split('&'),
'colours=red&colours=blue&colours=green&price=%C2%A3+100'.split('&'))
def test_from_response_post(self):
response = _buildresponse(
"""<form action="post.php" method="POST">
<input type="hidden" name="test" value="val1">
<input type="hidden" name="test" value="val2">
<input type="hidden" name="test2" value="xxx">
</form>""",
url="http://www.example.com/this/list.html")
req = self.request_class.from_response(response,
formdata={'one': ['two', 'three'], 'six': 'seven'})
self.assertEqual(req.method, 'POST')
self.assertEqual(req.headers['Content-type'], 'application/x-www-form-urlencoded')
self.assertEqual(req.url, "http://www.example.com/this/post.php")
fs = _qs(req)
self.assertEqual(set(fs["test"]), set(["val1", "val2"]))
self.assertEqual(set(fs["one"]), set(["two", "three"]))
self.assertEqual(fs['test2'], ['xxx'])
self.assertEqual(fs['six'], ['seven'])
def test_from_response_extra_headers(self):
response = _buildresponse(
"""<form action="post.php" method="POST">
<input type="hidden" name="test" value="val1">
<input type="hidden" name="test" value="val2">
<input type="hidden" name="test2" value="xxx">
</form>""")
req = self.request_class.from_response(response,
formdata={'one': ['two', 'three'], 'six': 'seven'},
headers={"Accept-Encoding": "gzip,deflate"})
self.assertEqual(req.method, 'POST')
self.assertEqual(req.headers['Content-type'], 'application/x-www-form-urlencoded')
self.assertEqual(req.headers['Accept-Encoding'], 'gzip,deflate')
def test_from_response_get(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="hidden" name="test" value="val1">
<input type="hidden" name="test" value="val2">
<input type="hidden" name="test2" value="xxx">
</form>""",
url="http://www.example.com/this/list.html")
r1 = self.request_class.from_response(response,
formdata={'one': ['two', 'three'], 'six': 'seven'})
self.assertEqual(r1.method, 'GET')
self.assertEqual(urlparse(r1.url).hostname, "www.example.com")
self.assertEqual(urlparse(r1.url).path, "/this/get.php")
fs = _qs(r1)
self.assertEqual(set(fs['test']), set(['val1', 'val2']))
self.assertEqual(set(fs['one']), set(['two', 'three']))
self.assertEqual(fs['test2'], ['xxx'])
self.assertEqual(fs['six'], ['seven'])
def test_from_response_override_params(self):
response = _buildresponse(
"""<form action="get.php" method="POST">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="3">
</form>""")
req = self.request_class.from_response(response, formdata={'two': '2'})
fs = _qs(req)
self.assertEqual(fs['one'], ['1'])
self.assertEqual(fs['two'], ['2'])
def test_from_response_override_method(self):
response = _buildresponse(
'''<html><body>
<form action="/app"></form>
</body></html>''')
request = FormRequest.from_response(response)
self.assertEqual(request.method, 'GET')
request = FormRequest.from_response(response, method='POST')
self.assertEqual(request.method, 'POST')
def test_from_response_override_url(self):
response = _buildresponse(
'''<html><body>
<form action="/app"></form>
</body></html>''')
request = FormRequest.from_response(response)
self.assertEqual(request.url, 'http://example.com/app')
request = FormRequest.from_response(response, url='http://foo.bar/absolute')
self.assertEqual(request.url, 'http://foo.bar/absolute')
request = FormRequest.from_response(response, url='/relative')
self.assertEqual(request.url, 'http://example.com/relative')
def test_from_response_submit_first_clickable(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="submit" name="clickable1" value="clicked1">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="3">
<input type="submit" name="clickable2" value="clicked2">
</form>""")
req = self.request_class.from_response(response, formdata={'two': '2'})
fs = _qs(req)
self.assertEqual(fs['clickable1'], ['clicked1'])
self.assertFalse('clickable2' in fs, fs)
self.assertEqual(fs['one'], ['1'])
self.assertEqual(fs['two'], ['2'])
def test_from_response_submit_not_first_clickable(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="submit" name="clickable1" value="clicked1">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="3">
<input type="submit" name="clickable2" value="clicked2">
</form>""")
req = self.request_class.from_response(response, formdata={'two': '2'}, \
clickdata={'name': 'clickable2'})
fs = _qs(req)
self.assertEqual(fs['clickable2'], ['clicked2'])
self.assertFalse('clickable1' in fs, fs)
self.assertEqual(fs['one'], ['1'])
self.assertEqual(fs['two'], ['2'])
def test_from_response_dont_submit_image_as_input(self):
response = _buildresponse(
"""<form>
<input type="hidden" name="i1" value="i1v">
<input type="image" name="i2" src="http://my.image.org/1.jpg">
<input type="submit" name="i3" value="i3v">
</form>""")
req = self.request_class.from_response(response, dont_click=True)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v']})
def test_from_response_dont_submit_reset_as_input(self):
response = _buildresponse(
"""<form>
<input type="hidden" name="i1" value="i1v">
<input type="text" name="i2" value="i2v">
<input type="reset" name="resetme">
<input type="submit" name="i3" value="i3v">
</form>""")
req = self.request_class.from_response(response, dont_click=True)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v'], 'i2': ['i2v']})
def test_from_response_multiple_clickdata(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="submit" name="clickable" value="clicked1">
<input type="submit" name="clickable" value="clicked2">
<input type="hidden" name="one" value="clicked1">
<input type="hidden" name="two" value="clicked2">
</form>""")
req = self.request_class.from_response(response, \
clickdata={'name': 'clickable', 'value': 'clicked2'})
fs = _qs(req)
self.assertEqual(fs['clickable'], ['clicked2'])
self.assertEqual(fs['one'], ['clicked1'])
self.assertEqual(fs['two'], ['clicked2'])
def test_from_response_unicode_clickdata(self):
response = _buildresponse(
u"""<form action="get.php" method="GET">
<input type="submit" name="price in \u00a3" value="\u00a3 1000">
<input type="submit" name="price in \u20ac" value="\u20ac 2000">
<input type="hidden" name="poundsign" value="\u00a3">
<input type="hidden" name="eurosign" value="\u20ac">
</form>""")
req = self.request_class.from_response(response, \
clickdata={'name': u'price in \u00a3'})
fs = _qs(req)
self.assertTrue(fs[u'price in \u00a3'.encode('utf-8')])
def test_from_response_multiple_forms_clickdata(self):
response = _buildresponse(
"""<form name="form1">
<input type="submit" name="clickable" value="clicked1">
<input type="hidden" name="field1" value="value1">
</form>
<form name="form2">
<input type="submit" name="clickable" value="clicked2">
<input type="hidden" name="field2" value="value2">
</form>
""")
req = self.request_class.from_response(response, formname='form2', \
clickdata={'name': 'clickable'})
fs = _qs(req)
self.assertEqual(fs['clickable'], ['clicked2'])
self.assertEqual(fs['field2'], ['value2'])
self.assertFalse('field1' in fs, fs)
def test_from_response_override_clickable(self):
response = _buildresponse('''<form><input type="submit" name="clickme" value="one"> </form>''')
req = self.request_class.from_response(response, \
formdata={'clickme': 'two'}, clickdata={'name': 'clickme'})
fs = _qs(req)
self.assertEqual(fs['clickme'], ['two'])
def test_from_response_dont_click(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="submit" name="clickable1" value="clicked1">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="3">
<input type="submit" name="clickable2" value="clicked2">
</form>""")
r1 = self.request_class.from_response(response, dont_click=True)
fs = _qs(r1)
self.assertFalse('clickable1' in fs, fs)
self.assertFalse('clickable2' in fs, fs)
def test_from_response_ambiguous_clickdata(self):
response = _buildresponse(
"""
<form action="get.php" method="GET">
<input type="submit" name="clickable1" value="clicked1">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="3">
<input type="submit" name="clickable2" value="clicked2">
</form>""")
self.assertRaises(ValueError, self.request_class.from_response,
response, clickdata={'type': 'submit'})
def test_from_response_non_matching_clickdata(self):
response = _buildresponse(
"""<form>
<input type="submit" name="clickable" value="clicked">
</form>""")
self.assertRaises(ValueError, self.request_class.from_response,
response, clickdata={'nonexistent': 'notme'})
def test_from_response_nr_index_clickdata(self):
response = _buildresponse(
"""<form>
<input type="submit" name="clickable1" value="clicked1">
<input type="submit" name="clickable2" value="clicked2">
</form>
""")
req = self.request_class.from_response(response, clickdata={'nr': 1})
fs = _qs(req)
self.assertIn('clickable2', fs)
self.assertNotIn('clickable1', fs)
def test_from_response_invalid_nr_index_clickdata(self):
response = _buildresponse(
"""<form>
<input type="submit" name="clickable" value="clicked">
</form>
""")
self.assertRaises(ValueError, self.request_class.from_response,
response, clickdata={'nr': 1})
def test_from_response_errors_noform(self):
response = _buildresponse("""<html></html>""")
self.assertRaises(ValueError, self.request_class.from_response, response)
def test_from_response_invalid_html5(self):
response = _buildresponse("""<!DOCTYPE html><body></html><form>"""
"""<input type="text" name="foo" value="xxx">"""
"""</form></body></html>""")
req = self.request_class.from_response(response, formdata={'bar': 'buz'})
fs = _qs(req)
self.assertEqual(fs, {'foo': ['xxx'], 'bar': ['buz']})
def test_from_response_errors_formnumber(self):
response = _buildresponse(
"""<form action="get.php" method="GET">
<input type="hidden" name="test" value="val1">
<input type="hidden" name="test" value="val2">
<input type="hidden" name="test2" value="xxx">
</form>""")
self.assertRaises(IndexError, self.request_class.from_response, response, formnumber=1)
def test_from_response_noformname(self):
response = _buildresponse(
"""<form action="post.php" method="POST">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="2">
</form>""")
r1 = self.request_class.from_response(response, formdata={'two':'3'})
self.assertEqual(r1.method, 'POST')
self.assertEqual(r1.headers['Content-type'], 'application/x-www-form-urlencoded')
fs = _qs(r1)
self.assertEqual(fs, {'one': ['1'], 'two': ['3']})
def test_from_response_formname_exists(self):
response = _buildresponse(
"""<form action="post.php" method="POST">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="2">
</form>
<form name="form2" action="post.php" method="POST">
<input type="hidden" name="three" value="3">
<input type="hidden" name="four" value="4">
</form>""")
r1 = self.request_class.from_response(response, formname="form2")
self.assertEqual(r1.method, 'POST')
fs = _qs(r1)
self.assertEqual(fs, {'four': ['4'], 'three': ['3']})
def test_from_response_formname_notexist(self):
response = _buildresponse(
"""<form name="form1" action="post.php" method="POST">
<input type="hidden" name="one" value="1">
</form>
<form name="form2" action="post.php" method="POST">
<input type="hidden" name="two" value="2">
</form>""")
r1 = self.request_class.from_response(response, formname="form3")
self.assertEqual(r1.method, 'POST')
fs = _qs(r1)
self.assertEqual(fs, {'one': ['1']})
def test_from_response_formname_errors_formnumber(self):
response = _buildresponse(
"""<form name="form1" action="post.php" method="POST">
<input type="hidden" name="one" value="1">
</form>
<form name="form2" action="post.php" method="POST">
<input type="hidden" name="two" value="2">
</form>""")
self.assertRaises(IndexError, self.request_class.from_response, \
response, formname="form3", formnumber=2)
def test_from_response_select(self):
res = _buildresponse(
'''<form>
<select name="i1">
<option value="i1v1">option 1</option>
<option value="i1v2" selected>option 2</option>
</select>
<select name="i2">
<option value="i2v1">option 1</option>
<option value="i2v2">option 2</option>
</select>
<select>
<option value="i3v1">option 1</option>
<option value="i3v2">option 2</option>
</select>
<select name="i4" multiple>
<option value="i4v1">option 1</option>
<option value="i4v2" selected>option 2</option>
<option value="i4v3" selected>option 3</option>
</select>
<select name="i5" multiple>
<option value="i5v1">option 1</option>
<option value="i5v2">option 2</option>
</select>
<select name="i6"></select>
<select name="i7"/>
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v2'], 'i2': ['i2v1'], 'i4': ['i4v2', 'i4v3']})
def test_from_response_radio(self):
res = _buildresponse(
'''<form>
<input type="radio" name="i1" value="i1v1">
<input type="radio" name="i1" value="iv2" checked>
<input type="radio" name="i2" checked>
<input type="radio" name="i2">
<input type="radio" name="i3" value="i3v1">
<input type="radio" name="i3">
<input type="radio" value="i4v1">
<input type="radio">
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['iv2'], 'i2': ['on']})
def test_from_response_checkbox(self):
res = _buildresponse(
'''<form>
<input type="checkbox" name="i1" value="i1v1">
<input type="checkbox" name="i1" value="iv2" checked>
<input type="checkbox" name="i2" checked>
<input type="checkbox" name="i2">
<input type="checkbox" name="i3" value="i3v1">
<input type="checkbox" name="i3">
<input type="checkbox" value="i4v1">
<input type="checkbox">
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['iv2'], 'i2': ['on']})
def test_from_response_input_text(self):
res = _buildresponse(
'''<form>
<input type="text" name="i1" value="i1v1">
<input type="text" name="i2">
<input type="text" value="i3v1">
<input type="text">
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v1'], 'i2': ['']})
def test_from_response_input_hidden(self):
res = _buildresponse(
'''<form>
<input type="hidden" name="i1" value="i1v1">
<input type="hidden" name="i2">
<input type="hidden" value="i3v1">
<input type="hidden">
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v1'], 'i2': ['']})
def test_from_response_input_textarea(self):
res = _buildresponse(
'''<form>
<textarea name="i1">i1v</textarea>
<textarea name="i2"></textarea>
<textarea name="i3"/>
<textarea>i4v</textarea>
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(fs, {'i1': ['i1v'], 'i2': [''], 'i3': ['']})
def test_from_response_descendants(self):
res = _buildresponse(
'''<form>
<div>
<fieldset>
<input type="text" name="i1">
<select name="i2">
<option value="v1" selected>
</select>
</fieldset>
<input type="radio" name="i3" value="i3v2" checked>
<input type="checkbox" name="i4" value="i4v2" checked>
<textarea name="i5"></textarea>
<input type="hidden" name="h1" value="h1v">
</div>
<input type="hidden" name="h2" value="h2v">
</form>''')
req = self.request_class.from_response(res)
fs = _qs(req)
self.assertEqual(set(fs), set(['h2', 'i2', 'i1', 'i3', 'h1', 'i5', 'i4']))
def test_from_response_xpath(self):
response = _buildresponse(
"""<form action="post.php" method="POST">
<input type="hidden" name="one" value="1">
<input type="hidden" name="two" value="2">
</form>
<form action="post2.php" method="POST">
<input type="hidden" name="three" value="3">
<input type="hidden" name="four" value="4">
</form>""")
r1 = self.request_class.from_response(response, formxpath="//form[@action='post.php']")
fs = _qs(r1)
self.assertEqual(fs['one'], ['1'])
r1 = self.request_class.from_response(response, formxpath="//form/input[@name='four']")
fs = _qs(r1)
self.assertEqual(fs['three'], ['3'])
self.assertRaises(ValueError, self.request_class.from_response,
response, formxpath="//form/input[@name='abc']")
def _buildresponse(body, **kwargs):
kwargs.setdefault('body', body)
kwargs.setdefault('url', 'http://example.com')
kwargs.setdefault('encoding', 'utf-8')
return HtmlResponse(**kwargs)
def _qs(req):
if req.method == 'POST':
qs = req.body
else:
qs = req.url.partition('?')[2]
return cgi.parse_qs(qs, True)
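# Illustrative note: for a GET request to "http://example.com/?a=1&a=2",
# _qs returns {'a': ['1', '2']} (blank values are kept because cgi.parse_qs
# is called with keep_blank_values=True).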
class XmlRpcRequestTest(RequestTest):
request_class = XmlRpcRequest
default_method = 'POST'
default_headers = {'Content-Type': ['text/xml']}
def _test_request(self, **kwargs):
r = self.request_class('http://scrapytest.org/rpc2', **kwargs)
self.assertEqual(r.headers['Content-Type'], 'text/xml')
self.assertEqual(r.body, xmlrpclib.dumps(**kwargs))
self.assertEqual(r.method, 'POST')
self.assertEqual(r.encoding, kwargs.get('encoding', 'utf-8'))
        self.assertTrue(r.dont_filter)
def test_xmlrpc_dumps(self):
self._test_request(params=('value',))
self._test_request(params=('username', 'password'), methodname='login')
self._test_request(params=('response', ), methodresponse='login')
self._test_request(params=(u'pas\xa3',), encoding='utf-8')
self._test_request(params=(u'pas\xa3',), encoding='latin')
self._test_request(params=(None,), allow_none=1)
self.assertRaises(TypeError, self._test_request)
self.assertRaises(TypeError, self._test_request, params=(None,))
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
scottpurdy/nupic | examples/opf/simple_server/model_params.py | 10 | 9318 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
MODEL_PARAMS = {
# Type of model that the rest of these parameters apply to.
'model': "HTMPrediction",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': {'days': 0,
'fields': [('consumption', 'sum')],
'hours': 1,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalAnomaly',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
      # at each step; 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Include the encoders we use
'encoders': {
u'consumption': {
'fieldname': u'consumption',
'resolution': 0.88,
'seed': 1,
'name': u'consumption',
'type': 'RandomDistributedScalarEncoder',
},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (21, 1),
'type': 'DateEncoder'},
'timestamp_weekend': { 'fieldname': u'timestamp',
'name': u'timestamp_weekend',
'type': 'DateEncoder',
'weekend': 21}
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
      # Valid keys are any combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
# Spatial Pooler implementation selector.
# Options: 'py', 'cpp' (speed optimized, new)
'spatialImp' : 'cpp',
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TM)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
      # What percent of the column's receptive field is available
# for potential synapses.
'potentialPct': 0.85,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10.
'synPermConnected': 0.1,
'synPermActiveInc': 0.04,
'synPermInactiveDec': 0.005,
},
# Controls whether TM is enabled or disabled;
# TM is necessary for making temporal predictions, such as predicting
# the next inputs. Without TM, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tmEnable' : True,
'tmParams': {
# TM diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/backtracking_tm.py and backtracking_tm_cpp.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TM)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TM
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TM
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TM how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'SDRClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'verbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.0001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1,5',
'implementation': 'py',
},
'anomalyParams': { u'anomalyCacheRecords': None,
u'autoDetectThreshold': None,
u'autoDetectWaitRecords': 2184},
'trainSPNetOnlyIfRequested': False,
},
}
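# A minimal usage sketch (hypothetical; the exact import path of ModelFactory
# varies between NuPIC versions):
#
#   from nupic.frameworks.opf.modelfactory import ModelFactory
#   model = ModelFactory.create(MODEL_PARAMS)
#   model.enableInference({'predictedField': 'consumption'})
#   result = model.run({'timestamp': datetime.datetime(2015, 1, 1, 0, 0),
#                       'consumption': 42.0})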
| agpl-3.0 |
mrjaydee82/SinLessKernel-4.4.4 | toolchains/linaro-arm-eabi-4.10-master/share/gdb/system-gdbinit/elinos.py | 134 | 3080 | # Copyright (C) 2011-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Configure GDB using the ELinOS environment."""
import os
import glob
import gdb
def warn(msg):
print "warning: %s" % msg
def get_elinos_environment():
"""Return the ELinOS environment.
If the ELinOS environment is properly set up, return a dictionary
which contains:
* The path to the ELinOS project at key 'project';
* The path to the ELinOS CDK at key 'cdk';
* The ELinOS target name at key 'target' (Eg. 'i486-linux');
* A list of Xenomai install prefixes (which could be empty, if
the ELinOS project does not include Xenomai) at key 'xenomai'.
If one of these cannot be found, print a warning; the corresponding
value in the returned dictionary will be None.
"""
result = {}
for key in ("project", "cdk", "target"):
var = "ELINOS_" + key.upper()
if var in os.environ:
result[key] = os.environ[var]
else:
warn("%s not set" % var)
result[key] = None
if result["project"] is not None:
result["xenomai"] = glob.glob(result["project"] + "/xenomai-[0-9.]*")
else:
result["xenomai"] = []
return result
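# Illustrative example: with ELINOS_PROJECT=/home/user/proj,
# ELINOS_CDK=/opt/elinos/cdk and ELINOS_TARGET=i486-linux set,
# get_elinos_environment() returns something like:
#   {'project': '/home/user/proj', 'cdk': '/opt/elinos/cdk',
#    'target': 'i486-linux', 'xenomai': ['/home/user/proj/xenomai-2.6.3']}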
def elinos_init():
"""Initialize debugger environment for ELinOS.
Let the debugger know where to find the ELinOS libraries on host. This
assumes that an ELinOS environment is properly set up. If some environment
variables are missing, warn about which library may be missing.
"""
elinos_env = get_elinos_environment()
solib_dirs = []
# System libraries
if None in (elinos_env[key] for key in ("cdk", "target")):
warn("ELinOS system libraries will not be loaded")
else:
solib_prefix = "%s/%s" % (elinos_env["cdk"], elinos_env["target"])
solib_dirs += ["%s/%s" % (solib_prefix, "lib")]
gdb.execute("set solib-absolute-prefix %s" % solib_prefix)
# Xenomai libraries. Those are optional, so have a lighter warning
# if they cannot be located.
if elinos_env["project"] is None:
warn("Xenomai libraries may not be loaded")
else:
for dir in elinos_env['xenomai']:
solib_dirs += ["%s/%s"
% (dir, "xenomai-build/usr/realtime/lib")]
if len(solib_dirs) != 0:
gdb.execute("set solib-search-path %s" % ":".join(solib_dirs))
if __name__ == "__main__":
elinos_init()
| gpl-2.0 |
amnona/heatsequer | heatsequer/experiment/expclass.py | 1 | 28528 | #!/usr/bin/env python
"""
heatsequer experiment class
"""
# amnonscript
__version__ = "0.9"
import heatsequer as hs
import os
import copy
import numpy as np
from pdb import set_trace as XXX
import time
import collections
class Experiment:
'''
experiment class holds the read data and metadata about the experiment, as well
as the command history
'''
# the static unique experiment id
experimentid=0
def __init__(self):
# the data matrix (non sparse)
self.data=[]
# True is data is sparse, False is data is not sparse
self.sparse=False
# the sample dictionary (double hash - sampleid and then mapping file field)
self.smap={}
# name of all the fields in the mapping data
self.fields=[]
# list of sampleids ordered according to the data matrix
self.samples=[]
# the sequences in the table
self.seqs=[]
# dictionary holding all the sequences and their position (for fast lookup)
self.seqdict={}
# taxonomies
self.tax=[]
# the hashed ids for the sequences
self.sids=[]
# the original name for each otu (from the biom table)
self.origotunames=[]
# original table name
self.tablefilename=''
# original mapping file
self.mapfilename=''
# name of the study (or the table file name without path)
self.studyname=''
# number of reads for each sample in the biom table
self.origreads=[]
# the original scaling factor used to convert the reads. how many original reads each normalized unit is equal to
# (i.e. if we load an experiment with 1k reads and normalize to 10k, scaling factor is 10)
self.scalingfactor=None
# the history of actions performed
self.filters=[]
# and the command list
self.commands=[]
# the complete sequence database
self.seqdb=None
# the cool sequences (manually curated) database
self.cdb=None
# the list of annotations to add to plot (for addplotmetadata)
self.plotmetadata=[]
# list of positions for horizontal lines (for diffexp etc.)
self.hlines=[]
# the tree structure of the sequences (from loadexptree)
self.tree=False
# the experiment type ('biom' or 'meta' for metabolite)
self.datatype=''
# the unqiue experiment id
self.uniqueid=0
# the md5 of the original data and mapping files loaded
# used for a unique id for the data in the manual curation database
self.datamd5=''
self.mapmd5=''
# both can be set via hs.getexpannotations()
# the list of annotations per sequence (key)
self.seqannotations=None
# the list of sequences per annotation (key)
self.annotationseqs=None
hs.Debug(0,'New experiment initialized')
# get a unique identifier and increase by 1
def getexperimentid(self):
Experiment.experimentid+=1
return Experiment.experimentid
def plotexp(self,**kwargs):
hs.plotexp(self,**kwargs)
def copyexp(expdat,todense=False):
"""
copy an experiment (duplicating the important fields)
but give it a unique identifier
Parameters
----------
expdat : Experiment
the experiment to copy
todense : bool (optional)
False (default) to not convert to dense, True to convert to dense
output:
newexp : Experiment
a deep copy of expdat
"""
newexp=copy.copy(expdat)
if todense:
newexp.data=expdat.data.todense()
newexp.sparse=False
else:
newexp.data=copy.deepcopy(expdat.data)
newexp.smap=copy.deepcopy(expdat.smap)
newexp.fields=copy.deepcopy(expdat.fields)
newexp.samples=copy.deepcopy(expdat.samples)
newexp.seqs=copy.deepcopy(expdat.seqs)
newexp.seqdict=copy.deepcopy(expdat.seqdict)
newexp.tax=copy.deepcopy(expdat.tax)
newexp.sids=copy.deepcopy(expdat.sids)
newexp.origotunames=copy.deepcopy(expdat.origotunames)
newexp.tablefilename=copy.deepcopy(expdat.tablefilename)
newexp.mapfilename=copy.deepcopy(expdat.mapfilename)
newexp.studyname=copy.deepcopy(expdat.studyname)
newexp.origreads=copy.deepcopy(expdat.origreads)
newexp.scalingfactor=copy.deepcopy(expdat.scalingfactor)
newexp.filters=copy.deepcopy(expdat.filters)
newexp.commands=copy.deepcopy(expdat.commands)
newexp.plotmetadata=copy.deepcopy(expdat.plotmetadata)
	# newexp.tree=copy.deepcopy(expdat.tree)
newexp.datatype=copy.deepcopy(expdat.datatype)
newexp.hlines=copy.deepcopy(expdat.hlines)
newexp.seqannotations=copy.deepcopy(expdat.seqannotations)
newexp.annotationseqs=copy.deepcopy(expdat.annotationseqs)
# get a unique identifier for this experiment
newexp.uniqueid=newexp.getexperimentid()
return newexp
def hashseq(seq):
'''
calculate the hash value for a given sequence (for an almost unique sequence identifier)
used for the sid field in experiment
input:
seq : str
the sequence to hash
output:
hval : int
the hash value for the sequence
'''
hval=hs.mlhash(seq, emod=10000000)
return hval
def addcommand(expdat,command,params={},replaceparams={}):
'''
append a command string to the experiment command list from the command and the unique experiment id
"expXXX=command" where XXX is the unique experiment id
if params is supplied, use them as the function parameters, otherwise just use command
input:
expdat : experiment
the experiment for which to prepare the command
command : str
the command
params : dict
if empty, just append the command
if dict, append command+"("+params+")"
replaceparams : dict
	a dict of parameters whose values need to be replaced by an experimentid.
key is parameter, value is experiment, from where the experimentid will be taken
'''
newcommand='exp%d=hs.%s' % (expdat.uniqueid,command)
if len(params)>0:
# if replaceparams:
# for rk,rv in replaceparams.items():
# if rk not in params:
# hs.Debug(9,'replacement parameter %s not in params' % rk)
# params[rk]='exp%d' % rv.uniqueid
newcommand+='('
for k,v in params.items():
if k in replaceparams:
v='exp%d' % v.uniqueid
else:
v=repr(v)
newcommand+='%s=%s,' % (k,str(v))
newcommand=newcommand[:-1]+')'
expdat.commands.append(newcommand)
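# Illustrative example (hypothetical command name): for an experiment whose
# uniqueid is 7, calling
#   addcommand(expdat, "filterminreads", params={'expdat': expdat, 'minreads': 10},
#              replaceparams={'expdat': expdat})
# appends a string like "exp7=hs.filterminreads(expdat=exp7,minreads=10)"
# to expdat.commands.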
def reordersamples(exp,newpos,inplace=False):
"""
reorder the samples of the experiment
input:
exp - the experiment
newpos - array - the new positions (can skip positions to delete them)
output:
newexp - the new experiment
"""
if inplace:
newexp=exp
else:
newexp=copyexp(exp)
# newexp=copy.deepcopy(exp)
newexp.data=newexp.data[:,newpos]
newexp.samples=hs.reorder(newexp.samples,newpos)
newexp.origreads=hs.reorder(newexp.origreads,newpos)
if newexp.scalingfactor is not None:
newexp.scalingfactor=newexp.scalingfactor[newpos]
return newexp
def reorderbacteria(exp,order,inplace=False):
"""
reorder the bacteria in an experiment (can delete if bacteria not in new order)
input:
exp - the experiment
order - the new order
output:
newexp
"""
if inplace:
newexp=exp
else:
newexp=copyexp(exp)
# newexp=copy.deepcopy(exp)
newexp.data=newexp.data[order,:]
newexp.seqs=hs.reorder(newexp.seqs,order)
newexp.seqdict={}
for idx,cseq in enumerate(newexp.seqs):
newexp.seqdict[cseq]=idx
newexp.tax=hs.reorder(newexp.tax,order)
newexp.sids=hs.reorder(newexp.sids,order)
# filter the annotations if needed
if exp.seqannotations is not None:
seqannotations={}
annotationseqs=collections.defaultdict(list)
for cseq in newexp.seqs:
seqannotations[cseq]=newexp.seqannotations[cseq]
for cinfo in seqannotations[cseq]:
annotationseqs[cinfo].append(cseq)
newexp.seqannotations=seqannotations
newexp.annotationseqs=annotationseqs
return newexp
def getfieldvals(expdat,field,ounique=False):
"""
get a list of the field values in all samples
input:
expdat : Experiment
field : string
name of the field to get the values from
ounique : bool
True to get unique values, False to get all
"""
vals=[]
for cid in expdat.samples:
vals.append(expdat.smap[cid][field])
if ounique:
vals=list(set(vals))
return vals
def joinfields(expdat,field1,field2,newfield):
"""
join 2 fields to create a new field for each sample
input:
expdat : Experiment
field1,field2 : string
name of the 2 fields to join
newfield : string
name of new field to add
"""
params=locals()
for csamp in expdat.samples:
expdat.smap[csamp][newfield]=expdat.smap[csamp][field1]+';'+expdat.smap[csamp][field2]
expdat.fields.append(newfield)
expdat.filters.append("join fields %s, %s to new field %s" % (field1,field2,newfield))
hs.addcommand(expdat,"joinfields",params=params,replaceparams={'expdat':expdat})
return expdat
def joinexperiments(exp1,exp2,missingval='NA',origfieldname='origexp',addbefore=False):
"""
join 2 experiments into a new experiment. adding a new field origfieldname
input:
exp1,exp2 - the experiments to join
missingval - string to put when field not in mapping file of one of the experiments
origfieldname - name of the new field to add which contains the original experiment name
addbefore : bool (optional)
False (default) to add '-1'/'-2' after sampleid if similar ids in both experiments
		True to add '-1'/'-2' before sampleid if similar ids in both experiments
"""
params=locals()
# test if same sampleid exists in both experiments. if so, add "-1" and "-2" to sampleid
samp1=set(exp1.samples)
samp2=set(exp2.samples)
if len(samp1.intersection(samp2))>0:
hs.Debug(6,'same sampleID - renaming samples')
exp1=hs.renamesamples(exp1,'-1',addbefore=addbefore)
exp2=hs.renamesamples(exp2,'-2',addbefore=addbefore)
# join the sequences of both experiments
# ASSUMING SAME SEQ LENGTH!!!!
allseqs=list(set(exp1.seqs) | set(exp2.seqs))
alldict={}
alltax=[]
allids=[]
for idx,cseq in enumerate(allseqs):
alldict[cseq]=idx
# make the new joined data for each experiment
dat1=np.zeros((len(allseqs),np.size(exp1.data,1)))
for idx,cseq in enumerate(allseqs):
if cseq in exp1.seqdict:
dat1[idx,:]=exp1.data[exp1.seqdict[cseq],:]
alltax.append(exp1.tax[exp1.seqdict[cseq]])
allids.append(exp1.sids[exp1.seqdict[cseq]])
else:
alltax.append(exp2.tax[exp2.seqdict[cseq]])
allids.append(exp2.sids[exp2.seqdict[cseq]])
dat2=np.zeros((len(allseqs),np.size(exp2.data,1)))
for idx,cseq in enumerate(allseqs):
if cseq in exp2.seqdict:
dat2[idx,:]=exp2.data[exp2.seqdict[cseq],:]
newexp=hs.copyexp(exp1)
# concatenate the reads
newexp.data=np.concatenate((dat1,dat2), axis=1)
newexp.seqdict=alldict
newexp.seqs=allseqs
newexp.tax=alltax
	# keep sids in sync with the joined sequences (supersedes the collected allids)
	newexp.sids=newexp.seqs
newexp.samples = list(exp1.samples) + list(exp2.samples)
newexp.origreads=exp1.origreads+exp2.origreads
newexp.scalingfactor=np.hstack([exp1.scalingfactor,exp2.scalingfactor])
newexp.fields=list(set(exp1.fields+exp2.fields))
for cfield in newexp.fields:
if cfield in exp1.fields:
continue
for csamp in exp1.samples:
newexp.smap[csamp][cfield]=missingval
for csamp in exp2.samples:
newexp.smap[csamp]={}
for cfield in newexp.fields:
if cfield in exp2.fields:
newexp.smap[csamp][cfield]=exp2.smap[csamp][cfield]
else:
newexp.smap[csamp][cfield]=missingval
for csamp in exp1.samples:
if origfieldname in exp1.fields:
cname=exp1.smap[csamp][origfieldname]
else:
cname=exp1.studyname
newexp.smap[csamp][origfieldname]=cname
for csamp in exp2.samples:
if origfieldname in exp2.fields:
cname=exp2.smap[csamp][origfieldname]
else:
cname=exp2.studyname
newexp.smap[csamp][origfieldname]=cname
if origfieldname not in newexp.fields:
newexp.fields.append(origfieldname)
newexp.filters.append('joined with %s' % exp2.studyname)
hs.addcommand(newexp,"joinexperiments",params=params,replaceparams={'exp1':exp1,'exp2':exp2})
return newexp
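# Illustrative example: joining two studies while recording each sample's
# source experiment in the 'origexp' mapping field:
#   joined = joinexperiments(exp1, exp2, origfieldname='origexp')
# sample ids shared by both experiments get '-1'/'-2' suffixes (or prefixes
# when addbefore=True), and missing mapping fields are filled with 'NA'.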
def clipseqs(expdat,startpos,addseq='TAC'):
"""
clip the first nucleotides in all sequences in experiment
to fix offset in sequencing
input:
expdat
startpos - the position to start from (0 indexed) or negative to add nucleotides
addseq - the sequence to add (just a guess) if startpos is negative
output:
newexp - new experiment with all sequences clipped and joined identical sequences
"""
params=locals()
newexp=copy.deepcopy(expdat)
newseqs=[]
newdict={}
keeppos=[]
for idx,cseq in enumerate(newexp.seqs):
if startpos>=0:
cseq=cseq[startpos:]
else:
cseq=addseq[:abs(startpos)]+cseq
cseq=cseq[:len(expdat.seqs[0])]
if cseq in newdict:
newexp.data[newdict[cseq],:] += newexp.data[idx,:]
else:
newdict[cseq]=idx
newseqs.append(cseq)
keeppos.append(idx)
newexp=reorderbacteria(newexp,keeppos)
	newexp.seqs=newseqs
	# rebuild seqdict with the positions in the reordered data matrix
	# (the indices collected in newdict refer to the pre-merge positions)
	newexp.seqdict={}
	for idx,cseq in enumerate(newseqs):
		newexp.seqdict[cseq]=idx
hs.addcommand(newexp,"clipseqs",params=params,replaceparams={'expdat':expdat})
newexp.filters.append("trim %d nucleotides" % startpos)
return newexp
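# Illustrative example: clipseqs(expdat, 2) removes the first 2 nucleotides of
# every sequence and sums the reads of sequences that become identical, while
# clipseqs(expdat, -3, addseq='TAC') prepends 'TAC' instead (a guess at the
# missing leading nucleotides).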
def findsamples(expdat,field,value,exclude=False):
"""
return the positions of samples in expdat matching value in field
similar to filtersamples but returns a list of indices (for the data matrix)
input:
expdat
field - name of the field to test
value - the value to look for (or a list of values)
exclude - True to get positions without that value, False to get positions of the value
output:
pos - a list of positions matching the field/val (for use as indices in expdat.data)
"""
pos=[]
if not isinstance(value,list):
value=[value]
for cidx,csamp in enumerate(expdat.samples):
if expdat.smap[csamp][field] in value:
if not exclude:
pos.append(cidx)
else:
if exclude:
pos.append(cidx)
return pos
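# Illustrative example (field/value names are hypothetical):
#   pos = findsamples(expdat, 'ENV_MATTER', 'feces')
#   fecaldata = expdat.data[:, pos]
# gives the data columns of all fecal samples.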
def zerobacteria(expdat,inplace=False):
"""
zero all the bacteria in an experiment (can then add insertbacteria)
input:
expdat : Experiment
inplace : bool
True to do inplace, False to make new copy
output:
newexp : Experiment
all bacteria have been removed
"""
if inplace:
newexp=expdat
else:
newexp=hs.copyexp(expdat)
newexp.data=np.zeros([0,len(newexp.samples)])
newexp.seqs=[]
newexp.tax=[]
newexp.seqdict={}
newexp.sids=[]
return newexp
def insertbacteria(expdat,freqs=[],seq="unknown",tax="unknown",logit=True):
"""
insert a new bacteria to an experiment
input:
expdat
freqs - the frequency of the bacteria in all samles of expdat or [] to add zeros
seq - the sequence of the new bacteria
tax - taxonomy of the new bacteria
logit - True to add command log/filter, False to not add (if called from other function)
output:
pos - position of the new bacteria
"""
params=locals()
if len(freqs)==0:
freqs=np.zeros([1,len(expdat.samples)])
expdat.data=np.vstack((expdat.data,freqs))
expdat.tax.append(tax)
if seq in expdat.seqdict:
hs.Debug(6,'Sequence already in experiment',seq)
# get a unique sequence
cid=0
while seq+str(cid) in expdat.seqdict:
cid+=1
# expdat.seqs.append()
seq=seq+str(cid)
expdat.seqs.append(seq)
expdat.seqdict[seq]=len(expdat.seqs)-1
expdat.sids.append(seq)
if logit:
expdat.filters.append("insert bacteria")
hs.addcommand(expdat,"insertbacteria",params=params,replaceparams={'expdat':expdat})
return expdat,len(expdat.seqs)-1
def addsubtrees(expdat,tree,inplace=False):
"""
add otus for all subtrees with the frequency being the sum of all bacteria in the subtree
input:
expdat - the experiment
tree - the tree for the experiment
inplace - if true, replace current experiment
output:
	newexp - the new experiment with up to 2n-1 otus (one extra otu per multi-leaf subtree)
"""
params=locals()
# if not expdat.tree:
# hs.Debug(8,"No tree loaded for experiment")
# return False
if inplace:
newexp=expdat
else:
newexp=hs.copyexp(expdat)
subtrees=tree.subsets()
for csubtree in subtrees:
newname=""
newtax=""
numuse=0
newfreq=np.zeros([1,len(newexp.samples)])
for cbact in csubtree:
if cbact not in newexp.seqdict:
hs.Debug(4,'sequence not in seqdict',cbact)
continue
numuse+=1
cpos=newexp.seqdict[cbact]
newfreq+=newexp.data[cpos,:]
newname+='%d,' % cpos
if newtax=='':
newtax=newexp.tax[cpos]
else:
newtax=hs.common_start(newtax,newexp.tax[cpos])
# add only if we have 2 bacteria or more
if numuse>1:
if newname not in newexp.seqdict:
newexp,newpos=insertbacteria(newexp,freqs=newfreq,seq=newname,tax=newtax,logit=False)
newexp.filters.append("Add subtrees")
hs.addcommand(newexp,"addsubtrees",params=params,replaceparams={'expdat':expdat})
return(newexp)
def findseqsinexp(expdat,seqs):
"""
find sequences from seqs in expdat sequences and return the indices
input:
expdat
seqs - a list of sequences
output:
res - a list of indices where seqs are in expdat sequences
"""
res=[]
for cseq in seqs:
res.append(expdat.seqdict[cseq])
return res
# def samplemeanpervalue(expdat,field):
# """
# BETTER TO USE filtersimilarsamples!!!!
# create a new experiment, with 1 sample per value in field, containing the mean of all samples with that value
# input:
# expdat : Experiment
# field : string
# the field to use (i.e. 'ENV_MATTER')
# output:
# newexp : Experiment
# The new experiment with 1 sample per unique value of field
# """
# params=locals()
# uvals=hs.getfieldvals(expdat,field,ounique=True)
# vals=hs.getfieldvals(expdat,field,ounique=False)
# vdict=hs.listtodict(vals)
# nsamps=[]
# for cval in uvals:
# nsamps.append(vdict[cval][0])
# newexp=hs.reordersamples(expdat,nsamps)
# for idx,cval in enumerate(uvals):
# cdat=expdat.data[:,vdict[cval]]
# mv=np.mean(cdat,axis=1)
# newexp.data[:,idx]=mv
# newexp.filters.append('samplemeanpervalue for field %s' % field)
# hs.addcommand(newexp,"samplemeanpervalue",params=params,replaceparams={'expdat':expdat})
# return(newexp)
def convertdatefield(expdat,field,newfield,timeformat='%m/%d/%y %H:%M'):
"""
convert a field containing date/time to a numeric (seocds since epoch) field (create a new field for that)
input:
expdat : Experiment
the experiment to add the field to
field : string
name of the field containing the date/time format
newfield : string
name of the new field (with seconds since epoch)
timeformat : string
format of the date/time field (based on time format)
output:
newexp : Experiment
the experiment with the added time since epoch field
"""
params=locals()
newexp=hs.copyexp(expdat)
newexp.fields.append(newfield)
numfailed=0
for csamp in newexp.samples:
try:
ctime=time.mktime(time.strptime(newexp.smap[csamp][field],timeformat))
		except (ValueError, OverflowError):
			# unparseable or out-of-range date/time value
ctime=0
numfailed+=1
newexp.smap[csamp][newfield]=str(ctime)
hs.Debug(6,'%d conversions failed' % numfailed)
newexp.filters.append('add time field %s (based on field %s)' % (newfield,field))
hs.addcommand(newexp,"convertdatefield",params=params,replaceparams={'expdat':expdat})
return(newexp)
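# Illustrative example: with the default timeformat '%m/%d/%y %H:%M', a value
# such as '03/21/14 13:30' is converted via
#   time.mktime(time.strptime('03/21/14 13:30', '%m/%d/%y %H:%M'))
# to local-time seconds since the epoch; values that fail to parse become '0'.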
def fieldtobact(expdat,field,bactname='',meanreads=1000,cutoff=0):
"""
convert values in a map file field to a new bacteria (to facilitate numeric analysis)
input:
expdat : Experiment
field : string
name of the field to convert
bactname : string
name of the new bacteria (empty to have similar to field name)
meanreads : int
the mean number of reads for the new field bacteria or None to not rescale
cutoff : int
the minimal value of the field per sample (otherwise replace with meanreads)
output:
newexp : Experiment
with added bacteria with the field vals as reads
"""
params=locals()
if len(bactname)==0:
bactname=field
fv=hs.getfieldvals(expdat,field)
vals=np.array(hs.tofloat(fv))
okpos=np.where(vals>=cutoff)[0]
badpos=np.where(vals<cutoff)[0]
if meanreads is not None:
scalefactor=np.mean(vals[okpos])
vals[okpos]=(vals[okpos]/scalefactor)*meanreads
vals[badpos]=meanreads
newexp=hs.copyexp(expdat)
hs.insertbacteria(newexp,vals,bactname,bactname,logit=False)
newexp.filters.append('add bacteria from map field %s' % field)
hs.addcommand(newexp,"fieldtobact",params=params,replaceparams={'expdat':expdat})
return(newexp)
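# Illustrative worked example: for field values [10, 20, 30] with
# meanreads=1000 and cutoff=0, scalefactor = mean([10, 20, 30]) = 20, so the
# inserted row of "reads" is [500, 1000, 1500]; values below cutoff would be
# replaced by meanreads itself.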
def get_data_path(fn, subfolder='data'):
"""
Return path to filename ``fn`` in the data folder.
returns the joining of the heatsequerdir variable (set in __init__) and the subfolder and fn
"""
return os.path.join(hs.heatsequerdir,subfolder,fn)
def addmapfield(expdat,fieldname,defaultval='NA',inplace=False):
"""
add a new field to the mapping file
input:
expdat : Experiment
fieldname : str
name of the new field
defaultval : str
the value for all samples
inplace : bool
True to overwrite current experiment, False (default) to copy
output:
newexp : Experiment
with the new field added
"""
if inplace:
newexp=expdat
else:
newexp=hs.copyexp(expdat)
if fieldname in newexp.fields:
		hs.Debug(8,'field %s already exists' % fieldname)
return newexp
newexp.fields.append(fieldname)
for csamp in newexp.samples:
newexp.smap[csamp][fieldname]=defaultval
return newexp
def changemapval(expdat,newfield,newval,oldfield,vals,inplace=False):
"""
change values of a field in the mapping file according to another field
input:
expdat : Experiment
newfield : name of the field to change the values in (from addmapfield?)
newval : the new value to put
oldfield : the field with the values to test
vals : a list of values, so newfield is set to newval only if the the value of oldfield is in the list
inplace : bool
True to overwrite current experiment, False (default) to copy
"""
if inplace:
newexp=expdat
else:
newexp=hs.copyexp(expdat)
for csamp in newexp.samples:
if newexp.smap[csamp][oldfield] in vals:
newexp.smap[csamp][newfield]=newval
return newexp
def getseqsamp(expdat,seq,samp,unnormalize=False):
"""
get the number of reads of a sequence/sample combination in the experiment
input:
expdat : ExpClass
the experiment
seq : str
the sequence to look for
samp : str
the sample name to look for
unnormalize : bool
False (default) to use normalized reads, True to un-normalize the result (to raw reads)
output:
reads : float
the number of reads of sequence seq in samples samp
"""
seqpos=expdat.seqdict[seq]
	# expdat.samples is a list, so locate the sample position with index()
	samppos=expdat.samples.index(samp)
reads=expdat.data[seqpos,samppos]
if unnormalize:
reads=reads*expdat.origreads[samppos]/np.sum(expdat.data[:,samppos])
return reads
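# Illustrative note: with unnormalize=True the normalized value is rescaled
# back to raw counts, e.g. a normalized value of 100 in a sample with 5000
# original reads and a normalized column total of 10000 becomes
# 100*5000/10000 = 50 reads.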
def addsample(expdat,sampleid,fieldvals={},missingval='NA',data=None):
"""
add a sample to the experiment
input:
expdat : Experiment
the experiment to add the sample to
sampleid : str
name of the sample
fieldvals : dict of (str: str)
dict (field: value) of mapping file field values
missingval : str
value to add for missing mapping file values
data : None of nparray
the reads per bacteria, or None to skip
output:
expdat : experiment
with the added sample
"""
hs.Debug(1,'Add sample %s to experiment' % sampleid)
if sampleid in expdat.samples:
		hs.Debug(8,'Sample %s already in experiment! aborting' % sampleid)
return expdat
# add the sample
expdat.samples.append(sampleid)
# and the mapping file values
expdat.smap[sampleid]={}
for cfield in expdat.fields:
if cfield in fieldvals:
expdat.smap[sampleid][cfield]=fieldvals[cfield]
else:
expdat.smap[sampleid][cfield]=missingval
if data is None:
data=np.zeros(np.shape(expdat.data)[0])
expdat.origreads.append(np.sum(data))
data=np.reshape(data,[len(data),1])
expdat.data=np.hstack([expdat.data,data])
return expdat
def taxtoseq(expdat,fixtax=False):
"""
put the taxonomy into the sequence field
input:
expdat : Experiment
fixtax: bool (optional)
False (default) to just copy, True to remove the k__ etc.
output:
newexp : Experiment
with seqs=taxonomies
"""
newexp=hs.copyexp(expdat)
newexp.seqs=newexp.tax
if fixtax:
newtax=[]
for ctax in newexp.tax:
cstr=''
cctax=ctax.split(';')
for clevel in range(7):
if len(cctax)>clevel:
cstr+=cctax[clevel][3:]
cstr+=';'
newtax.append(cstr)
newexp.seqs=newtax
newexp.seqdict={}
newseqs=[]
for idx,cseq in enumerate(newexp.seqs):
if cseq in newexp.seqdict:
hs.Debug(8,'found %s again' % cseq)
cseq=cseq+'-'+str(idx)
newseqs.append(cseq)
newexp.seqdict[cseq]=idx
newexp.seqs=newseqs
	return newexp
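# Example (hypothetical; use taxonomy strings, with the k__/p__ prefixes removed, as ids):
#	taxexp=taxtoseq(expdat,fixtax=True)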
def renamesamples(expdat,addstr,addbefore=True):
"""
rename all the samples in expdat by adding addbefore before or after the name of each sample
input:
expdat : Experiment
the experiment to change the sample names in
addstr : str
the string to add to each sampleid
addbefore : bool (optional)
True (default) to add addstr before each sampleid
False to add addstr after each sampleid
output:
newexp : Experiment
with new sample names
"""
newexp=hs.copyexp(expdat)
newids=[]
newmap={}
for csamp in newexp.samples:
if addbefore:
cnewid=addstr+csamp
else:
cnewid=csamp+addstr
newids.append(cnewid)
newmap[cnewid]={}
for ckey,cval in newexp.smap[csamp].items():
newmap[cnewid][ckey]=cval
newexp.samples=newids
newexp.smap=newmap
return newexp
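# Example (hypothetical; prefix sample ids before joining two runs of the same study):
#	exp1=renamesamples(exp1,'run1-')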
def validateexp(expdat):
"""
test the validity of an experiment:
1. seqdict is correct
2. smap contains all the samples
3. smap fields are the same as fields
4. issparse is correct
	5. taxonomy length is the same as the number of sequences
input:
expdat : Experiment
output:
isok : bool
True if experiment is validated, False if there is a problem
"""
	# minimal implementation of checks 1, 2 and 5 above (the original body was left
	# unfinished; checks 3 and 4 are still not implemented)
	isok=True
	for idx,cseq in enumerate(expdat.seqs):
		if expdat.seqdict.get(cseq,-1)!=idx:
			hs.Debug(8,'seqdict mismatch for sequence at position %d' % idx)
			isok=False
	for csamp in expdat.samples:
		if csamp not in expdat.smap:
			hs.Debug(8,'sample %s missing from smap' % csamp)
			isok=False
	if len(expdat.tax)!=len(expdat.seqs):
		hs.Debug(8,'taxonomy length does not match the number of sequences')
		isok=False
	return isok
def getheatsequerdir():
"""
Get the root directory of heatsequer
"""
return hs.heatsequerdir
def trimfieldnames(expdat,field,newfield,trimlen=6):
"""
trim experiment per sample field values to trimlen
input:
expdat: Experiment
field : str
name of the field to trim the values in
newfield : str
name of the field where to keep the trimmed values
trimlen : int
>0 : trim keeping first trimlen chars
<0 : trim keeping last -trimlen chars
output:
	newexp : Experiment
with trimmed field values
"""
params=locals()
for csamp in expdat.samples:
cstr=expdat.smap[csamp][field]
if trimlen>0:
cstr=cstr[:trimlen]
else:
cstr=cstr[trimlen:]
expdat.smap[csamp][newfield]=cstr
expdat.fields.append(newfield)
expdat.filters.append('Trim field names field %s trimlen %d' % (field,trimlen))
hs.addcommand(expdat,"trimfieldnames",params=params,replaceparams={'expdat':expdat})
return expdat
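# Example (hypothetical; keep only the first 6 characters of each 'date' value):
#	expdat=trimfieldnames(expdat,'date','date6',trimlen=6)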
def addfield(expdat,field,values):
"""
add a new field to the experiment and add the values to it
inplace
input:
expdat : experiment
field : str
name of the new field to add
values : list of str or str
		the values to add. if str - use the same value for all samples. if list - use values[i] for sample i
output:
expdat : experiment
with the new field added (NOTE: inplace)
"""
for idx,csamp in enumerate(expdat.samples):
if type(values)==str:
expdat.smap[csamp][field]=values
else:
expdat.smap[csamp][field]=values[idx]
expdat.fields.append(field)
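# Example (hypothetical; note that addfield modifies expdat in place):
#	addfield(expdat,'study','pilot')	# same value for every sample
#	addfield(expdat,'batch',['b1','b1','b2'])	# one value per sample, in order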
def filtermapfields(expdat,fields=['#SampleID'],keep=True,inplace=False):
"""
filter fields from the experiment mapping data
input:
expdat : Experiment
fields : list of str
the list of the fields to keep/remove
keep : bool (optional)
True (default) to keep only the fields specified
False to remove the fields specified
inplace : bool (optional)
False (default) to create new experiment
True to replace in current experiment
output:
newexp : Experiment
with only the fields requested
"""
params=locals()
newsmap={}
newfields=set(expdat.fields)
if keep:
newfields=newfields.intersection(set(fields))
else:
newfields=newfields.difference(set(fields))
newfields.add('#SampleID')
for csamp in expdat.samples:
newsmap[csamp]={}
for cfield in newfields:
newsmap[csamp][cfield]=expdat.smap[csamp][cfield]
if inplace:
newexp=expdat
else:
newexp=hs.copyexp(expdat)
newexp.fields=list(newfields)
newexp.smap=newsmap
	newexp.filters.append('filter map fields %s (keep=%s)' % (fields,keep))
	hs.addcommand(newexp,"filtermapfields",params=params,replaceparams={'expdat':expdat})
return newexp
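# Example (hypothetical; keep only the sample id and treatment columns of the mapping):
#	slimexp=filtermapfields(expdat,fields=['#SampleID','treatment'],keep=True)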
def expfromcalour(cexp):
'''
convert an experiment from calour to heatsequer
input:
cexp : calour experiment
	output:
	newexp : heatsequer Experiment
	'''
newexp=Experiment()
newexp.data=copy.copy(cexp.data).transpose()
newexp.samples=list(cexp.sample_metadata.index)
newexp.seqs=list(cexp.feature_metadata.index)
if 'taxonomy' in cexp.feature_metadata.columns:
newexp.tax=[';'.join(x) for x in cexp.feature_metadata['taxonomy']]
else:
newexp.tax=list(cexp.feature_metadata.index)
newexp.sids=list(cexp.feature_metadata.index)
newexp.origreads=np.sum(newexp.data,0)
newexp.fields=list(cexp.sample_metadata.columns)
for csamp in newexp.samples:
newexp.smap[csamp]={}
for cfield in newexp.fields:
newexp.smap[csamp][cfield]=cexp.sample_metadata.loc[csamp][cfield]
newexp.commands.append('From calour experiment')
newexp.commands.append(cexp.description)
return newexp
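# Example (hypothetical; assumes a calour experiment object cexp):
#	hsexp=expfromcalour(cexp)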
| bsd-3-clause |
j-carl/boto | boto/directconnect/exceptions.py | 148 | 1239 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
class DirectConnectClientException(Exception):
pass
class DirectConnectServerException(Exception):
pass
| mit |
maartenq/ansible | test/units/modules/source_control/test_gitlab_deploy_key.py | 12 | 8009 | # -*- coding: utf-8 -*-
# Copyright (c) 2018 Marcus Watkins <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.compat.tests.mock import patch
from ansible.modules.source_control import gitlab_deploy_key
from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
import pytest
import json
from units.modules.utils import set_module_args
fake_server_state = [
{
"id": 1,
"title": "Public key",
"key": 'ssh-rsa long/+base64//+string==',
"created_at": "2013-10-02T10:12:29Z",
"can_push": False
},
]
class FakeReader:
def __init__(self, object):
self.content = json.dumps(object, sort_keys=True)
def read(self):
return self.content
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs):
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
@pytest.fixture
def fetch_url_mock(mocker):
return mocker.patch('ansible.module_utils.gitlab.fetch_url')
@pytest.fixture
def module_mock(mocker):
return mocker.patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
def test_access_token_output(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'absent'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
first_call = fetch_url_mock.call_args_list[0][1]
assert first_call['url'] == 'https://gitlab.example.com/api/v4/projects/10/deploy_keys'
assert first_call['headers']['Authorization'] == 'Bearer test-access-token'
assert 'Private-Token' not in first_call['headers']
assert first_call['method'] == 'GET'
def test_private_token_output(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'private_token': 'test-private-token',
'project': 'foo/bar',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'absent'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
first_call = fetch_url_mock.call_args_list[0][1]
assert first_call['url'] == 'https://gitlab.example.com/api/v4/projects/foo%2Fbar/deploy_keys'
assert first_call['headers']['Private-Token'] == 'test-private-token'
assert 'Authorization' not in first_call['headers']
assert first_call['method'] == 'GET'
def test_bad_http_first_response(capfd, fetch_url_mock, module_mock):
fetch_url_mock.side_effect = [[FakeReader("Permission denied"), {'status': 403}], [FakeReader("Permission denied"), {'status': 403}]]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'absent'
})
with pytest.raises(AnsibleFailJson):
gitlab_deploy_key.main()
def test_bad_http_second_response(capfd, fetch_url_mock, module_mock):
fetch_url_mock.side_effect = [[FakeReader(fake_server_state), {'status': 200}], [FakeReader("Permission denied"), {'status': 403}]]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'present'
})
with pytest.raises(AnsibleFailJson):
gitlab_deploy_key.main()
def test_delete_non_existing(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'absent'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
assert result.value.args[0]['changed'] is False
def test_delete_existing(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-rsa long/+base64//+string==',
'title': 'a title',
'state': 'absent'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
second_call = fetch_url_mock.call_args_list[1][1]
assert second_call['url'] == 'https://gitlab.example.com/api/v4/projects/10/deploy_keys/1'
assert second_call['method'] == 'DELETE'
assert result.value.args[0]['changed'] is True
def test_add_new(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'key': 'ssh-key foobar',
'title': 'a title',
'state': 'present'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
second_call = fetch_url_mock.call_args_list[1][1]
assert second_call['url'] == 'https://gitlab.example.com/api/v4/projects/10/deploy_keys'
assert second_call['method'] == 'POST'
assert second_call['data'] == '{"can_push": false, "key": "ssh-key foobar", "title": "a title"}'
assert result.value.args[0]['changed'] is True
def test_update_existing(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'title': 'Public key',
'key': 'ssh-rsa long/+base64//+string==',
'can_push': 'yes',
'state': 'present'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
second_call = fetch_url_mock.call_args_list[1][1]
assert second_call['url'] == 'https://gitlab.example.com/api/v4/projects/10/deploy_keys/1'
assert second_call['method'] == 'PUT'
assert second_call['data'] == ('{"can_push": true, "key": "ssh-rsa long/+base64//+string==", "title": "Public key"}')
assert result.value.args[0]['changed'] is True
def test_unchanged_existing(capfd, fetch_url_mock, module_mock):
fetch_url_mock.return_value = [FakeReader(fake_server_state), {'status': 200}]
set_module_args({
'api_url': 'https://gitlab.example.com/api',
'access_token': 'test-access-token',
'project': '10',
'title': 'Public key',
'key': 'ssh-rsa long/+base64//+string==',
'can_push': 'no',
'state': 'present'
})
with pytest.raises(AnsibleExitJson) as result:
gitlab_deploy_key.main()
assert result.value.args[0]['changed'] is False
assert fetch_url_mock.call_count == 1
| gpl-3.0 |
chainer/chainer | chainer/testing/helper.py | 6 | 3610 | import contextlib
import sys
import unittest
import warnings
import pkg_resources
try:
import mock
_mock_error = None
except ImportError as e:
_mock_error = e
def _check_mock_available():
if _mock_error is not None:
raise RuntimeError(
'mock is not available: Reason: {}'.format(_mock_error))
def with_requires(*requirements):
"""Run a test case only when given requirements are satisfied.
.. admonition:: Example
This test case runs only when `numpy>=1.10` is installed.
>>> import unittest
>>> from chainer import testing
>>> class Test(unittest.TestCase):
... @testing.with_requires('numpy>=1.10')
... def test_for_numpy_1_10(self):
... pass
Args:
requirements: A list of string representing requirement condition to
run a given test case.
"""
ws = pkg_resources.WorkingSet()
try:
ws.require(*requirements)
skip = False
except pkg_resources.ResolutionError:
skip = True
msg = 'requires: {}'.format(','.join(requirements))
return unittest.skipIf(skip, msg)
def without_requires(*requirements):
"""Run a test case only when given requirements are not satisfied.
.. admonition:: Example
This test case runs only when `numpy>=1.10` is not installed.
    >>> import unittest
    >>> from chainer import testing
    >>> class Test(unittest.TestCase):
... @testing.without_requires('numpy>=1.10')
... def test_without_numpy_1_10(self):
... pass
Args:
requirements: A list of string representing requirement condition to
run a given test case.
"""
ws = pkg_resources.WorkingSet()
try:
ws.require(*requirements)
skip = True
except pkg_resources.ResolutionError:
skip = False
msg = 'requires: {}'.format(','.join(requirements))
return unittest.skipIf(skip, msg)
@contextlib.contextmanager
def assert_warns(expected):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
yield
# Python 2 does not raise warnings multiple times from the same stack
# frame.
if sys.version_info >= (3, 0):
if not any(isinstance(m.message, expected) for m in w):
try:
exc_name = expected.__name__
except AttributeError:
exc_name = str(expected)
            raise AssertionError('%s not triggered' % exc_name)
def _import_object_from_name(fullname):
comps = fullname.split('.')
obj = sys.modules.get(comps[0])
if obj is None:
raise RuntimeError('Can\'t import {}'.format(comps[0]))
for i, comp in enumerate(comps[1:]):
obj = getattr(obj, comp)
if obj is None:
raise RuntimeError(
'Can\'t find object {}'.format('.'.join(comps[:i + 1])))
return obj
def patch(target, *args, **kwargs):
"""A wrapper of mock.patch which appends wraps argument.
.. note::
Unbound methods are not supported as ``wraps`` argument.
Args:
target(str): Full name of target object.
wraps: Wrapping object which will be passed to ``mock.patch`` as
``wraps`` argument.
If omitted, the object specified by ``target`` is used.
*args: Passed to ``mock.patch``.
**kwargs: Passed to ``mock.patch``.
"""
_check_mock_available()
try:
wraps = kwargs.pop('wraps')
except KeyError:
wraps = _import_object_from_name(target)
return mock.patch(target, *args, wraps=wraps, **kwargs)
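# Example (illustrative only; assumes the mock package is installed and that the
# patched module has already been imported somewhere):
#
#     with patch('os.getcwd') as m:
#         os.getcwd()       # the real function still runs because of wraps=
#         assert m.call_count == 1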
| mit |
liavkoren/djangoDev | django/contrib/gis/geoip/tests.py | 48 | 4728 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import unittest
from unittest import skipUnless
from django.conf import settings
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.geoip import HAS_GEOIP
from django.utils import six
if HAS_GEOIP:
from . import GeoIP, GeoIPException
if HAS_GEOS:
from ..geos import GEOSGeometry
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_PATH setting should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat').
@skipUnless(HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_PATH setting.")
class GeoIPTest(unittest.TestCase):
def test01_init(self):
"Testing GeoIP initialization."
g1 = GeoIP() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP(path, 0) # Passing in data path explicitly.
g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertEqual(True, bool(g._country))
self.assertEqual(True, bool(g._city))
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLiteCity.dat')
cntry = os.path.join(path, 'GeoIP.dat')
g4 = GeoIP(city, country='')
self.assertEqual(None, g4._country)
g5 = GeoIP(cntry, city='')
self.assertEqual(None, g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
self.assertRaises(GeoIPException, GeoIP, cache=bad)
if isinstance(bad, six.string_types):
e = GeoIPException
else:
e = TypeError
self.assertRaises(e, GeoIP, bad, 0)
def test02_bad_query(self):
"Testing GeoIP query parameter checking."
cntry_g = GeoIP(city='<foo>')
# No city database available, these calls should fail.
self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
# Non-string query should raise TypeError
self.assertRaises(TypeError, cntry_g.country_code, 17)
self.assertRaises(TypeError, cntry_g.country_name, GeoIP)
def test03_country(self):
"Testing GeoIP country querying methods."
g = GeoIP(city='<foo>')
fqdn = 'www.google.com'
addr = '12.215.42.19'
for query in (fqdn, addr):
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
@skipUnless(HAS_GEOS, "Geos is required")
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')
addr = '128.249.1.1'
fqdn = 'tmc.edu'
for query in (fqdn, addr):
# Country queries should still work.
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
# City information dictionary.
d = g.city(query)
self.assertEqual('USA', d['country_code3'])
self.assertEqual('Houston', d['city'])
self.assertEqual('TX', d['region'])
self.assertEqual(713, d['area_code'])
geom = g.geos(query)
self.assertIsInstance(geom, GEOSGeometry)
lon, lat = (-95.4010, 29.7079)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 4)
self.assertAlmostEqual(lat, tup[1], 4)
def test05_unicode_response(self):
"Testing that GeoIP strings are properly encoded, see #16553."
g = GeoIP()
d = g.city("www.osnabrueck.de")
self.assertEqual('Osnabrück', d['city'])
d = g.country('200.7.49.81')
self.assertEqual('Curaçao', d['country_name'])
| bsd-3-clause |
kumanna/Simple-OFDM-Modem | usrp/transmit.py | 1 | 1973 | #!/usr/bin/env python
"""
Continuously transmit the complex samples in transmit-data.dat through a USRP sink.
"""
INTERP = 128
TXGAIN = 30
CONSTANT = 0.10
from gnuradio import gr, gr_unittest
from gnuradio import eng_notation
import usrp_options
from optparse import OptionParser
from gnuradio.eng_option import eng_option
from pick_bitrate import pick_tx_bitrate
def main():
gr.enable_realtime_scheduling()
tb = gr.top_block ()
src = gr.file_source(gr.sizeof_gr_complex, "transmit-data.dat", True)
parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
(options, args) = parser.parse_args ()
d = {'verbose': True, 'discontinuous': False, 'samples_per_symbol': 2, 'usrpx': None, 'interp': INTERP, 'fusb_block_size': 0, 'megabytes': 1.0, 'rx_freq': 2.475e9, 'size': 1500, 'show_tx_gain_range': False, 'log': False, 'tx_subdev_spec': None, 'fusb_nblocks': 0, 'lo_offset': None, 'tx_gain': TXGAIN, 'which': 0, 'modulation': 'gmsk', 'excess_bw': 0.34999999999999998, 'bt': 0.34999999999999998, 'interface': 'eth0', 'freq': None, 'bitrate': 100000.0, 'from_file': None, 'tx_freq': 2475000000.0, 'mac_addr': '', 'tx_amplitude': 0.1, 'gray_code': True}
for i, j in d.items():
setattr(options, i, j)
u = usrp_options.create_usrp_sink(options)
dac_rate = u.dac_rate()
if options.verbose:
print 'USRP Sink:', u
(_bitrate, _samples_per_symbol, _interp) = \
pick_tx_bitrate(options.bitrate, 2, \
options.samples_per_symbol, options.interp, dac_rate, \
u.get_interp_rates())
u.set_interp(_interp)
u.set_auto_tr(True)
if not u.set_center_freq(options.tx_freq):
print "Failed to set Rx frequency to %s" % (eng_notation.num_to_str(options.tx_freq))
raise ValueError, eng_notation.num_to_str(options.tx_freq)
m = gr.multiply_const_cc(CONSTANT)
tb.connect(src, m, u)
tb.run()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print "Bye"
| gpl-3.0 |
Vimos/scikit-learn | sklearn/ensemble/tests/test_partial_dependence.py | 365 | 6996 | """
Testing for the partial dependence module.
"""
import numpy as np
from numpy.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import if_matplotlib
from sklearn.ensemble.partial_dependence import partial_dependence
from sklearn.ensemble.partial_dependence import plot_partial_dependence
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import GradientBoostingRegressor
from sklearn import datasets
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]
# also load the boston dataset
boston = datasets.load_boston()
# also load the iris dataset
iris = datasets.load_iris()
def test_partial_dependence_classifier():
# Test partial dependence for classifier
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(X, y)
pdp, axes = partial_dependence(clf, [0], X=X, grid_resolution=5)
# only 4 grid points instead of 5 because only 4 unique X[:,0] vals
assert pdp.shape == (1, 4)
assert axes[0].shape[0] == 4
# now with our own grid
X_ = np.asarray(X)
grid = np.unique(X_[:, 0])
pdp_2, axes = partial_dependence(clf, [0], grid=grid)
assert axes is None
assert_array_equal(pdp, pdp_2)
def test_partial_dependence_multiclass():
# Test partial dependence for multi-class classifier
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, iris.target)
grid_resolution = 25
n_classes = clf.n_classes_
pdp, axes = partial_dependence(
clf, [0], X=iris.data, grid_resolution=grid_resolution)
assert pdp.shape == (n_classes, grid_resolution)
assert len(axes) == 1
assert axes[0].shape[0] == grid_resolution
def test_partial_dependence_regressor():
# Test partial dependence for regressor
clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
clf.fit(boston.data, boston.target)
grid_resolution = 25
pdp, axes = partial_dependence(
clf, [0], X=boston.data, grid_resolution=grid_resolution)
assert pdp.shape == (1, grid_resolution)
assert axes[0].shape[0] == grid_resolution
def test_partial_dependence_input():
# Test input validation of partial dependence.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(X, y)
assert_raises(ValueError, partial_dependence,
clf, [0], grid=None, X=None)
assert_raises(ValueError, partial_dependence,
clf, [0], grid=[0, 1], X=X)
# first argument must be an instance of BaseGradientBoosting
assert_raises(ValueError, partial_dependence,
{}, [0], X=X)
# Gradient boosting estimator must be fit
assert_raises(ValueError, partial_dependence,
GradientBoostingClassifier(), [0], X=X)
assert_raises(ValueError, partial_dependence, clf, [-1], X=X)
assert_raises(ValueError, partial_dependence, clf, [100], X=X)
# wrong ndim for grid
grid = np.random.rand(10, 2, 1)
assert_raises(ValueError, partial_dependence, clf, [0], grid=grid)
@if_matplotlib
def test_plot_partial_dependence():
# Test partial dependence plot function.
clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
clf.fit(boston.data, boston.target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, boston.data, [0, 1, (0, 1)],
grid_resolution=grid_resolution,
feature_names=boston.feature_names)
assert len(axs) == 3
assert all(ax.has_data for ax in axs)
# check with str features and array feature names
fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
('CRIM', 'ZN')],
grid_resolution=grid_resolution,
feature_names=boston.feature_names)
assert len(axs) == 3
assert all(ax.has_data for ax in axs)
# check with list feature_names
feature_names = boston.feature_names.tolist()
fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
('CRIM', 'ZN')],
grid_resolution=grid_resolution,
feature_names=feature_names)
assert len(axs) == 3
assert all(ax.has_data for ax in axs)
@if_matplotlib
def test_plot_partial_dependence_input():
# Test partial dependence plot function input checks.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
# not fitted yet
assert_raises(ValueError, plot_partial_dependence,
clf, X, [0])
clf.fit(X, y)
assert_raises(ValueError, plot_partial_dependence,
clf, np.array(X)[:, :0], [0])
# first argument must be an instance of BaseGradientBoosting
assert_raises(ValueError, plot_partial_dependence,
{}, X, [0])
# must be larger than -1
assert_raises(ValueError, plot_partial_dependence,
clf, X, [-1])
# too large feature value
assert_raises(ValueError, plot_partial_dependence,
clf, X, [100])
# str feature but no feature_names
assert_raises(ValueError, plot_partial_dependence,
clf, X, ['foobar'])
# not valid features value
assert_raises(ValueError, plot_partial_dependence,
clf, X, [{'foo': 'bar'}])
@if_matplotlib
def test_plot_partial_dependence_multiclass():
# Test partial dependence plot function on multi-class input.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, iris.target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
label=0,
grid_resolution=grid_resolution)
assert len(axs) == 2
assert all(ax.has_data for ax in axs)
# now with symbol labels
target = iris.target_names[iris.target]
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
label='setosa',
grid_resolution=grid_resolution)
assert len(axs) == 2
assert all(ax.has_data for ax in axs)
# label not in gbrt.classes_
assert_raises(ValueError, plot_partial_dependence,
clf, iris.data, [0, 1], label='foobar',
grid_resolution=grid_resolution)
# label not provided
assert_raises(ValueError, plot_partial_dependence,
clf, iris.data, [0, 1],
grid_resolution=grid_resolution)
| bsd-3-clause |
Neamar/django | tests/deprecation/tests.py | 199 | 7253 | from __future__ import unicode_literals
import os
import unittest
import warnings
from django.test import SimpleTestCase
from django.test.utils import reset_warning_registry
from django.utils import six
from django.utils.deprecation import RenameMethodsBase
from django.utils.encoding import force_text
class RenameManagerMethods(RenameMethodsBase):
renamed_methods = (
('old', 'new', DeprecationWarning),
)
class RenameMethodsTests(SimpleTestCase):
"""
Tests the `RenameMethodsBase` type introduced to rename `get_query_set`
to `get_queryset` across the code base following #15363.
"""
def test_class_definition_warnings(self):
"""
Ensure a warning is raised upon class definition to suggest renaming
the faulty method.
"""
reset_warning_registry()
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
class Manager(six.with_metaclass(RenameManagerMethods)):
def old(self):
pass
self.assertEqual(len(recorded), 1)
msg = str(recorded[0].message)
self.assertEqual(msg,
'`Manager.old` method should be renamed `new`.')
def test_get_new_defined(self):
"""
Ensure `old` complains and not `new` when only `new` is defined.
"""
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('ignore')
class Manager(six.with_metaclass(RenameManagerMethods)):
def new(self):
pass
warnings.simplefilter('always')
manager = Manager()
manager.new()
self.assertEqual(len(recorded), 0)
manager.old()
self.assertEqual(len(recorded), 1)
msg = str(recorded.pop().message)
self.assertEqual(msg,
'`Manager.old` is deprecated, use `new` instead.')
def test_get_old_defined(self):
"""
Ensure `old` complains when only `old` is defined.
"""
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('ignore')
class Manager(six.with_metaclass(RenameManagerMethods)):
def old(self):
pass
warnings.simplefilter('always')
manager = Manager()
manager.new()
self.assertEqual(len(recorded), 0)
manager.old()
self.assertEqual(len(recorded), 1)
msg = str(recorded.pop().message)
self.assertEqual(msg,
'`Manager.old` is deprecated, use `new` instead.')
def test_deprecated_subclass_renamed(self):
"""
        Ensure the correct warnings are raised when a class that didn't rename
        `old` subclasses one that did.
"""
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('ignore')
class Renamed(six.with_metaclass(RenameManagerMethods)):
def new(self):
pass
class Deprecated(Renamed):
def old(self):
super(Deprecated, self).old()
warnings.simplefilter('always')
deprecated = Deprecated()
deprecated.new()
self.assertEqual(len(recorded), 1)
msg = str(recorded.pop().message)
self.assertEqual(msg,
'`Renamed.old` is deprecated, use `new` instead.')
recorded[:] = []
deprecated.old()
self.assertEqual(len(recorded), 2)
msgs = [str(warning.message) for warning in recorded]
self.assertEqual(msgs, [
'`Deprecated.old` is deprecated, use `new` instead.',
'`Renamed.old` is deprecated, use `new` instead.',
])
def test_renamed_subclass_deprecated(self):
"""
        Ensure the correct warnings are raised when a class that renamed
        `old` subclasses one that didn't.
"""
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('ignore')
class Deprecated(six.with_metaclass(RenameManagerMethods)):
def old(self):
pass
class Renamed(Deprecated):
def new(self):
super(Renamed, self).new()
warnings.simplefilter('always')
renamed = Renamed()
renamed.new()
self.assertEqual(len(recorded), 0)
renamed.old()
self.assertEqual(len(recorded), 1)
msg = str(recorded.pop().message)
self.assertEqual(msg,
'`Renamed.old` is deprecated, use `new` instead.')
def test_deprecated_subclass_renamed_and_mixins(self):
"""
        Ensure the correct warnings are raised when a subclass inherits from a
class that renamed `old` and mixins that may or may not have renamed
`new`.
"""
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('ignore')
class Renamed(six.with_metaclass(RenameManagerMethods)):
def new(self):
pass
class RenamedMixin(object):
def new(self):
super(RenamedMixin, self).new()
class DeprecatedMixin(object):
def old(self):
super(DeprecatedMixin, self).old()
class Deprecated(DeprecatedMixin, RenamedMixin, Renamed):
pass
warnings.simplefilter('always')
deprecated = Deprecated()
deprecated.new()
self.assertEqual(len(recorded), 1)
msg = str(recorded.pop().message)
self.assertEqual(msg,
'`RenamedMixin.old` is deprecated, use `new` instead.')
deprecated.old()
self.assertEqual(len(recorded), 2)
msgs = [str(warning.message) for warning in recorded]
self.assertEqual(msgs, [
'`DeprecatedMixin.old` is deprecated, use `new` instead.',
'`RenamedMixin.old` is deprecated, use `new` instead.',
])
class DeprecatingSimpleTestCaseUrls(unittest.TestCase):
def test_deprecation(self):
"""
Ensure the correct warning is raised when SimpleTestCase.urls is used.
"""
class TempTestCase(SimpleTestCase):
urls = 'tests.urls'
def test(self):
pass
with warnings.catch_warnings(record=True) as recorded:
warnings.filterwarnings('always')
suite = unittest.TestLoader().loadTestsFromTestCase(TempTestCase)
with open(os.devnull, 'w') as devnull:
unittest.TextTestRunner(stream=devnull, verbosity=2).run(suite)
msg = force_text(recorded.pop().message)
self.assertEqual(msg,
"SimpleTestCase.urls is deprecated and will be removed in "
"Django 1.10. Use @override_settings(ROOT_URLCONF=...) "
"in TempTestCase instead.")
| bsd-3-clause |
twz915/django | django/db/backends/sqlite3/base.py | 4 | 17967 | """
SQLite3 backend for the sqlite3 module in the standard library.
"""
import decimal
import re
import warnings
from sqlite3 import dbapi2 as Database
import pytz
from django.core.exceptions import ImproperlyConfigured
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import timezone
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.encoding import force_text
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
def decoder(conv_func):
""" The Python sqlite3 interface returns always byte strings.
This function converts the received value to a regular string before
passing it to the receiver function.
"""
return lambda s: conv_func(s.decode('utf-8'))
Database.register_converter("bool", decoder(lambda s: s == '1'))
Database.register_converter("time", decoder(parse_time))
Database.register_converter("date", decoder(parse_date))
Database.register_converter("datetime", decoder(parse_datetime))
Database.register_converter("timestamp", decoder(parse_datetime))
Database.register_converter("TIMESTAMP", decoder(parse_datetime))
Database.register_converter("decimal", decoder(backend_utils.typecast_decimal))
Database.register_adapter(decimal.Decimal, backend_utils.rev_typecast_decimal)
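# With the converters above registered (and detect_types set in get_connection_params()
# below), values read from columns declared with these type names are decoded
# automatically; e.g. b'1' in a "bool" column comes back as True and b'2016-01-01' in a
# "date" column as datetime.date(2016, 1, 1).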
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'sqlite'
# SQLite doesn't actually support most of these types, but it "does the right
# thing" given more verbose field definitions, so leave them as is so that
# schema inspection is more useful.
data_types = {
'AutoField': 'integer',
'BigAutoField': 'integer',
'BinaryField': 'BLOB',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'decimal',
'DurationField': 'bigint',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'real',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'GenericIPAddressField': 'char(39)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer unsigned',
'PositiveSmallIntegerField': 'smallint unsigned',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'text',
'TimeField': 'time',
'UUIDField': 'char(32)',
}
data_types_suffix = {
'AutoField': 'AUTOINCREMENT',
'BigAutoField': 'AUTOINCREMENT',
}
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'",
'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
'startswith': r"LIKE {} || '%%' ESCAPE '\'",
'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'",
'endswith': r"LIKE '%%' || {} ESCAPE '\'",
'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'",
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
def get_connection_params(self):
settings_dict = self.settings_dict
if not settings_dict['NAME']:
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
# Always allow the underlying SQLite connection to be shareable
# between multiple threads. The safe-guarding will be handled at a
# higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
# property. This is necessary as the shareability is disabled by
# default in pysqlite and it cannot be changed once a connection is
# opened.
if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
warnings.warn(
'The `check_same_thread` option was provided and set to '
'True. It will be overridden with False. Use the '
'`DatabaseWrapper.allow_thread_sharing` property instead '
'for controlling thread shareability.',
RuntimeWarning
)
kwargs.update({'check_same_thread': False})
if self.features.can_share_in_memory_db:
kwargs.update({'uri': True})
return kwargs
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.create_function("django_date_extract", 2, _sqlite_date_extract)
conn.create_function("django_date_trunc", 2, _sqlite_date_trunc)
conn.create_function("django_datetime_cast_date", 2, _sqlite_datetime_cast_date)
conn.create_function("django_datetime_cast_time", 2, _sqlite_datetime_cast_time)
conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract)
conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc)
conn.create_function("django_time_extract", 2, _sqlite_time_extract)
conn.create_function("django_time_trunc", 2, _sqlite_time_trunc)
conn.create_function("django_time_diff", 2, _sqlite_time_diff)
conn.create_function("django_timestamp_diff", 2, _sqlite_timestamp_diff)
conn.create_function("regexp", 2, _sqlite_regexp)
conn.create_function("django_format_dtdelta", 3, _sqlite_format_dtdelta)
conn.create_function("django_power", 2, _sqlite_power)
return conn
def init_connection_state(self):
pass
def create_cursor(self, name=None):
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
self.validate_thread_sharing()
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if not self.is_in_memory_db():
BaseDatabaseWrapper.close(self)
def _savepoint_allowed(self):
# Two conditions are required here:
# - A sufficiently recent version of SQLite to support savepoints,
# - Being in a transaction, which can only happen inside 'atomic'.
# When 'isolation_level' is not None, sqlite3 commits before each
# savepoint; it's a bug. When it is None, savepoints don't make sense
# because autocommit is enabled. The only exception is inside 'atomic'
# blocks. To work around that bug, on SQLite, 'atomic' starts a
# transaction explicitly rather than simply disable autocommit.
return self.features.uses_savepoints and self.in_atomic_block
def _set_autocommit(self, autocommit):
if autocommit:
level = None
else:
# sqlite3's internal default is ''. It's different from None.
# See Modules/_sqlite/connection.c.
level = ''
# 'isolation_level' is a misleading API.
# SQLite always runs at the SERIALIZABLE isolation level.
with self.wrap_database_errors:
self.connection.isolation_level = level
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute(
"""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
"""
% (
primary_key_column_name, column_name, table_name,
referenced_table_name, column_name, referenced_column_name,
column_name, referenced_column_name,
)
)
for bad_row in cursor.fetchall():
raise utils.IntegrityError(
"The row in table '%s' with primary key '%s' has an "
"invalid foreign key: %s.%s contains a value '%s' that "
"does not have a corresponding value in %s.%s." % (
table_name, bad_row[0], table_name, column_name,
bad_row[1], referenced_table_name, referenced_column_name,
)
)
def is_usable(self):
return True
def _start_transaction_under_autocommit(self):
"""
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
"""
self.cursor().execute("BEGIN")
def is_in_memory_db(self):
return self.creation.is_in_memory_db(self.settings_dict['NAME'])
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
This fixes it -- but note that if you want to use a literal "%s" in a query,
you'll need to use "%%s".
"""
def execute(self, query, params=None):
if params is None:
return Database.Cursor.execute(self, query)
query = self.convert_query(query)
return Database.Cursor.execute(self, query, params)
def executemany(self, query, param_list):
query = self.convert_query(query)
return Database.Cursor.executemany(self, query, param_list)
def convert_query(self, query):
return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
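# Illustration of the conversion performed by convert_query() (not executed):
#   "SELECT * FROM t WHERE a = %s AND b LIKE '%%x'"
#   becomes
#   "SELECT * FROM t WHERE a = ? AND b LIKE '%x'"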
def _sqlite_date_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
elif lookup_type == 'week':
return dt.isocalendar()[1]
else:
return getattr(dt, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)
def _sqlite_time_trunc(lookup_type, dt):
try:
dt = backend_utils.typecast_time(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'hour':
return "%02i:00:00" % dt.hour
elif lookup_type == 'minute':
return "%02i:%02i:00" % (dt.hour, dt.minute)
elif lookup_type == 'second':
return "%02i:%02i:%02i" % (dt.hour, dt.minute, dt.second)
def _sqlite_datetime_parse(dt, tzname):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if tzname is not None:
dt = timezone.localtime(dt, pytz.timezone(tzname))
return dt
def _sqlite_datetime_cast_date(dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
return dt.date().isoformat()
def _sqlite_datetime_cast_time(dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
return dt.time().isoformat()
def _sqlite_datetime_extract(lookup_type, dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
elif lookup_type == 'week':
return dt.isocalendar()[1]
else:
return getattr(dt, lookup_type)
def _sqlite_datetime_trunc(lookup_type, dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
elif lookup_type == 'hour':
return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour)
elif lookup_type == 'minute':
return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
elif lookup_type == 'second':
return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
def _sqlite_time_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_time(dt)
except (ValueError, TypeError):
return None
return getattr(dt, lookup_type)
def _sqlite_format_dtdelta(conn, lhs, rhs):
"""
LHS and RHS can be either:
- An integer number of microseconds
- A string representing a timedelta object
- A string representing a datetime
"""
try:
if isinstance(lhs, int):
lhs = str(decimal.Decimal(lhs) / decimal.Decimal(1000000))
real_lhs = parse_duration(lhs)
if real_lhs is None:
real_lhs = backend_utils.typecast_timestamp(lhs)
if isinstance(rhs, int):
rhs = str(decimal.Decimal(rhs) / decimal.Decimal(1000000))
real_rhs = parse_duration(rhs)
if real_rhs is None:
real_rhs = backend_utils.typecast_timestamp(rhs)
if conn.strip() == '+':
out = real_lhs + real_rhs
else:
out = real_lhs - real_rhs
except (ValueError, TypeError):
return None
# typecast_timestamp returns a date or a datetime without timezone.
# It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
return str(out)
def _sqlite_time_diff(lhs, rhs):
left = backend_utils.typecast_time(lhs)
right = backend_utils.typecast_time(rhs)
return (
(left.hour * 60 * 60 * 1000000) +
(left.minute * 60 * 1000000) +
(left.second * 1000000) +
(left.microsecond) -
(right.hour * 60 * 60 * 1000000) -
(right.minute * 60 * 1000000) -
(right.second * 1000000) -
(right.microsecond)
)
def _sqlite_timestamp_diff(lhs, rhs):
left = backend_utils.typecast_timestamp(lhs)
right = backend_utils.typecast_timestamp(rhs)
return (left - right).total_seconds() * 1000000
def _sqlite_regexp(re_pattern, re_string):
return bool(re.search(re_pattern, force_text(re_string))) if re_string is not None else False
def _sqlite_power(x, y):
return x ** y
| bsd-3-clause |
laborautonomo/bitmask_client | src/leap/bitmask/util/credentials.py | 6 | 2785 | # -*- coding: utf-8 -*-
# credentials.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Credentials utilities
"""
from PySide import QtCore, QtGui
WEAK_PASSWORDS = ("123456", "qweasd", "qwerty", "password")
USERNAME_REGEX = r"^[a-z][a-z\d_\-\.]+[a-z\d]$"
USERNAME_VALIDATOR = QtGui.QRegExpValidator(QtCore.QRegExp(USERNAME_REGEX))
def username_checks(username):
# translation helper
_tr = QtCore.QObject().tr
message = None
if message is None and len(username) < 2:
message = _tr("Username must have at least 2 characters")
valid = USERNAME_VALIDATOR.validate(username, 0)
valid_username = valid[0] == QtGui.QValidator.State.Acceptable
if message is None and not valid_username:
message = _tr("That username is not allowed. Try another.")
return message is None, message
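# Example (hypothetical):
#   username_checks('john_doe')  # -> (True, None)
#   username_checks('J')         # -> (False, "Username must have at least 2 characters")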
def password_checks(username, password, password2):
"""
Performs basic password checks to avoid really easy passwords.
    :param username: username provided at the registration form
:type username: str
:param password: password from the registration form
:type password: str
:param password2: second password from the registration form
    :type password2: str
:returns: (True, None, None) if all the checks pass,
(False, message, field name) otherwise
:rtype: tuple(bool, str, str)
"""
# translation helper
_tr = QtCore.QObject().tr
message = None
field = None
if message is None and password != password2:
message = _tr("Passwords don't match")
field = 'new_password_confirmation'
if message is None and not password:
message = _tr("Password is empty")
field = 'new_password'
if message is None and len(password) < 8:
message = _tr("Password is too short")
field = 'new_password'
if message is None and password in WEAK_PASSWORDS:
message = _tr("Password is too easy")
field = 'new_password'
if message is None and username == password:
message = _tr("Password can't be the same as username")
field = 'new_password'
return message is None, message, field
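# Example (hypothetical):
#   ok, msg, field = password_checks('alice', 'correct horse battery', 'correct horse battery')
#   # ok is True, msg and field are None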
| gpl-3.0 |
JamisHoo/Cloud-Image-Migration-Tool | usr/lib/requests/packages/chardet/langhungarianmodel.py | 2763 | 12536 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbols (punctuation) that do not belong to a word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
Latin2HungarianModel = {
'charToOrderMap': Latin2_HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "ISO-8859-2"
}
Win1250HungarianModel = {
'charToOrderMap': win1250HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "windows-1250"
}
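# Illustrative scoring sketch (an assumption, not part of the upstream
# chardet module): a single-byte charset prober consumes a model dict like
# the two above by mapping each byte to its frequency order and scoring
# consecutive low-order pairs against the 64x64 precedence matrix. The
# function below is a simplified approximation of that loop.
def _score_text(raw_bytes, model, sample_size=64):
    order_map = model['charToOrderMap']
    matrix = model['precedenceMatrix']
    last_order = 255
    pairs = positives = 0
    for byte in bytearray(raw_bytes):
        order = order_map[byte]
        if order < sample_size:
            if last_order < sample_size:
                pairs += 1
                # 3 marks the most frequent sequences in the table above
                if matrix[last_order * sample_size + order] == 3:
                    positives += 1
        last_order = order
    return float(positives) / pairs if pairs else 0.0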
# flake8: noqa
| mit |
pico12/trading-with-python | sandbox/spreadCalculations.py | 78 | 1496 | '''
Created on 28 Oct 2011
@author: jev
'''
from tradingWithPython import estimateBeta, Spread, returns, Portfolio, readBiggerScreener
from tradingWithPython.lib import yahooFinance
from pandas import DataFrame, Series
import numpy as np
import matplotlib.pyplot as plt
import os
symbols = ['SPY','IWM']
y = yahooFinance.HistData('temp.csv')
y.startDate = (2007,1,1)
df = y.loadSymbols(symbols,forceDownload=False)
#df = y.downloadData(symbols)
res = readBiggerScreener('CointPairs.csv')
#---check with spread scanner
#sp = DataFrame(index=symbols)
#
#sp['last'] = df.ix[-1,:]
#sp['targetCapital'] = Series({'SPY':100,'IWM':-100})
#sp['targetShares'] = sp['targetCapital']/sp['last']
#print sp
#The dollar-neutral ratio is about 1 * SPY - 1.7 * IWM. You will get the spread = zero (or probably very near zero)
#s = Spread(symbols, histClose = df)
#print s
#s.value.plot()
#print 'beta (returns)', estimateBeta(df[symbols[0]],df[symbols[1]],algo='returns')
#print 'beta (log)', estimateBeta(df[symbols[0]],df[symbols[1]],algo='log')
#print 'beta (standard)', estimateBeta(df[symbols[0]],df[symbols[1]],algo='standard')
#p = Portfolio(df)
#p.setShares([1, -1.7])
#p.value.plot()
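# Illustrative arithmetic (hypothetical prices, not taken from the data
# above): with SPY near 119 and IWM near 70, one long SPY share carries
# about as many dollars as 1.7 short IWM shares (119 / 70 ~= 1.7), which
# is where the -1.7 ratio used below comes from.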
quote = yahooFinance.getQuote(symbols)
print quote
s = Spread(symbols,histClose=df, estimateBeta = False)
s.setLast(quote['last'])
s.setShares(Series({'SPY':1,'IWM':-1.7}))
print s
#s.value.plot()
#s.plot()
fig = plt.figure(2)
s.plot()
| bsd-3-clause |
chenjun0210/tensorflow | tensorflow/python/tools/strip_unused_lib.py | 37 | 4314 | # pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities to remove unneeded nodes from a GraphDefs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from google.protobuf import text_format
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import graph_util
from tensorflow.python.platform import gfile
def strip_unused(input_graph_def, input_node_names, output_node_names,
placeholder_type_enum):
"""Removes unused nodes from a GraphDef.
Args:
input_graph_def: A graph with nodes we want to prune.
input_node_names: A list of the nodes we use as inputs.
output_node_names: A list of the output nodes.
placeholder_type_enum: The AttrValue enum for the placeholder data type, or
a list that specifies one value per input node name.
Returns:
A GraphDef with all unnecessary ops removed.
"""
# Here we replace the nodes we're going to override as inputs with
# placeholders so that any unused nodes that are inputs to them are
# automatically stripped out by extract_sub_graph().
inputs_replaced_graph_def = graph_pb2.GraphDef()
for node in input_graph_def.node:
if node.name in input_node_names:
placeholder_node = node_def_pb2.NodeDef()
placeholder_node.op = "Placeholder"
placeholder_node.name = node.name
if isinstance(placeholder_type_enum, list):
input_node_index = input_node_names.index(node.name)
placeholder_node.attr["dtype"].CopyFrom(
attr_value_pb2.AttrValue(type=placeholder_type_enum[
input_node_index]))
else:
placeholder_node.attr["dtype"].CopyFrom(
attr_value_pb2.AttrValue(type=placeholder_type_enum))
if "_output_shapes" in node.attr:
placeholder_node.attr["_output_shapes"].CopyFrom(node.attr[
"_output_shapes"])
inputs_replaced_graph_def.node.extend([placeholder_node])
else:
inputs_replaced_graph_def.node.extend([copy.deepcopy(node)])
output_graph_def = graph_util.extract_sub_graph(inputs_replaced_graph_def,
output_node_names)
return output_graph_def
def strip_unused_from_files(input_graph, input_binary, output_graph,
output_binary, input_node_names, output_node_names,
placeholder_type_enum):
"""Removes unused nodes from a graph file."""
if not gfile.Exists(input_graph):
print("Input graph file '" + input_graph + "' does not exist!")
return -1
if not output_node_names:
print("You need to supply the name of a node to --output_node_names.")
return -1
input_graph_def = graph_pb2.GraphDef()
mode = "rb" if input_binary else "r"
with gfile.FastGFile(input_graph, mode) as f:
if input_binary:
input_graph_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), input_graph_def)
output_graph_def = strip_unused(input_graph_def,
input_node_names.split(","),
output_node_names.split(","),
placeholder_type_enum)
if output_binary:
with gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
else:
with gfile.GFile(output_graph, "w") as f:
f.write(text_format.MessageToString(output_graph_def))
print("%d ops in the final graph." % len(output_graph_def.node))
| apache-2.0 |
monikasulik/django-oscar | sites/demo/apps/checkout/views.py | 35 | 5404 | from django.contrib import messages
from django import http
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from datacash.facade import Facade
from oscar.apps.checkout import views, exceptions
from oscar.apps.payment.forms import BankcardForm
from oscar.apps.payment.models import SourceType
from oscar.apps.order.models import BillingAddress
from .forms import BillingAddressForm
# Customise the core PaymentDetailsView to integrate Datacash
class PaymentDetailsView(views.PaymentDetailsView):
def check_payment_data_is_captured(self, request):
if request.method != "POST":
raise exceptions.FailedPreCondition(
url=reverse('checkout:payment-details'),
message=_("Please enter your payment details"))
def get_context_data(self, **kwargs):
ctx = super(PaymentDetailsView, self).get_context_data(**kwargs)
# Ensure newly instantiated instances of the bankcard and billing
# address forms are passed to the template context (when they aren't
# already specified).
if 'bankcard_form' not in kwargs:
ctx['bankcard_form'] = BankcardForm()
if 'billing_address_form' not in kwargs:
ctx['billing_address_form'] = self.get_billing_address_form(
ctx['shipping_address']
)
elif kwargs['billing_address_form'].is_valid():
# On the preview view, we extract the billing address into the
# template context so we can show it to the customer.
ctx['billing_address'] = kwargs[
'billing_address_form'].save(commit=False)
return ctx
def get_billing_address_form(self, shipping_address):
"""
Return an instantiated billing address form
"""
addr = self.get_default_billing_address()
if not addr:
return BillingAddressForm(shipping_address=shipping_address)
billing_addr = BillingAddress()
addr.populate_alternative_model(billing_addr)
return BillingAddressForm(shipping_address=shipping_address,
instance=billing_addr)
def handle_payment_details_submission(self, request):
# Validate the submitted forms
bankcard_form = BankcardForm(request.POST)
shipping_address = self.get_shipping_address(
self.request.basket)
address_form = BillingAddressForm(shipping_address, request.POST)
if address_form.is_valid() and bankcard_form.is_valid():
# If both forms are valid, we render the preview view with the
# forms hidden within the page. This seems odd but means we don't
# have to store sensitive details on the server.
return self.render_preview(
request, bankcard_form=bankcard_form,
billing_address_form=address_form)
# Forms are invalid - show them to the customer along with the
# validation errors.
return self.render_payment_details(
request, bankcard_form=bankcard_form,
billing_address_form=address_form)
def handle_place_order_submission(self, request):
bankcard_form = BankcardForm(request.POST)
shipping_address = self.get_shipping_address(
self.request.basket)
address_form = BillingAddressForm(shipping_address, request.POST)
if address_form.is_valid() and bankcard_form.is_valid():
# Forms still valid, let's submit an order
submission = self.build_submission(
order_kwargs={
'billing_address': address_form.save(commit=False),
},
payment_kwargs={
'bankcard_form': bankcard_form,
'billing_address_form': address_form
}
)
return self.submit(**submission)
# Must be DOM tampering as these forms were valid and were rendered in
# a hidden element. Hence, we don't need to be that friendly with our
# error message.
messages.error(request, _("Invalid submission"))
return http.HttpResponseRedirect(
reverse('checkout:payment-details'))
def handle_payment(self, order_number, total, **kwargs):
# Make request to DataCash - if there any problems (eg bankcard
# not valid / request refused by bank) then an exception would be
# raised and handled by the parent PaymentDetail view)
facade = Facade()
bankcard = kwargs['bankcard_form'].bankcard
datacash_ref = facade.pre_authorise(
order_number, total.incl_tax, bankcard)
# Request was successful - record the "payment source". As this
# request was a 'pre-auth', we set the 'amount_allocated' - if we had
# performed an 'auth' request, then we would set 'amount_debited'.
source_type, _ = SourceType.objects.get_or_create(name='Datacash')
source = source_type.sources.model(
source_type=source_type,
currency=total.currency,
amount_allocated=total.incl_tax,
reference=datacash_ref)
self.add_payment_source(source)
# Also record payment event
self.add_payment_event(
'pre-auth', total.incl_tax, reference=datacash_ref)
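        # Hedged sketch (an assumption, not part of this demo): if the
        # gateway call above performed an immediate 'auth' instead of a
        # 'pre-auth', the source would record the captured amount, e.g.:
        #
        #   source = source_type.sources.model(
        #       source_type=source_type, currency=total.currency,
        #       amount_debited=total.incl_tax, reference=datacash_ref)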
| bsd-3-clause |
maxamillion/ansible | lib/ansible/executor/discovery/python_target.py | 84 | 1234 | # Copyright: (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# FUTURE: this could be swapped out for our bundled version of distro to move more complete platform
# logic to the targets, so long as we maintain Py2.6 compat and don't need to do any kind of script assembly
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import platform
import io
import os
def read_utf8_file(path, encoding='utf-8'):
if not os.access(path, os.R_OK):
return None
with io.open(path, 'r', encoding=encoding) as fd:
content = fd.read()
return content
def get_platform_info():
result = dict(platform_dist_result=[])
if hasattr(platform, 'dist'):
result['platform_dist_result'] = platform.dist()
osrelease_content = read_utf8_file('/etc/os-release')
# try to fall back to /usr/lib/os-release
if not osrelease_content:
osrelease_content = read_utf8_file('/usr/lib/os-release')
result['osrelease_content'] = osrelease_content
return result
def main():
info = get_platform_info()
print(json.dumps(info))
if __name__ == '__main__':
main()
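# Illustrative output (an assumption -- real values depend on the target
# host): running this script on a typical Linux machine prints one JSON
# object along the lines of
#   {"platform_dist_result": ["Ubuntu", "18.04", "bionic"],
#    "osrelease_content": "NAME=\"Ubuntu\"\n..."}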
| gpl-3.0 |
wyc/django | tests/forms_tests/widget_tests/test_selectdatewidget.py | 144 | 20641 | from datetime import date
from django.forms import DateField, Form, SelectDateWidget
from django.test import override_settings
from django.utils import translation
from django.utils.dates import MONTHS_AP
from .base import WidgetTest
class SelectDateWidgetTest(WidgetTest):
maxDiff = None
widget = SelectDateWidget(
years=('2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016'),
)
def test_render_empty(self):
self.check_html(self.widget, 'mydate', '', html=(
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
"""
))
def test_render_none(self):
"""
Rendering the None or '' values should yield the same output.
"""
self.assertHTMLEqual(
self.widget.render('mydate', None),
self.widget.render('mydate', ''),
)
def test_render_string(self):
self.check_html(self.widget, 'mydate', '2010-04-15', html=(
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4" selected="selected">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15" selected="selected">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
"""
))
def test_render_datetime(self):
self.assertHTMLEqual(
self.widget.render('mydate', date(2010, 4, 15)),
self.widget.render('mydate', '2010-04-15'),
)
def test_render_invalid_date(self):
"""
Invalid dates should still render the failed date.
"""
self.check_html(self.widget, 'mydate', '2010-02-31', html=(
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">January</option>
<option value="2" selected="selected">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31" selected="selected">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
"""
))
def test_custom_months(self):
widget = SelectDateWidget(months=MONTHS_AP, years=('2013',))
self.check_html(widget, 'mydate', '', html=(
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">---</option>
<option value="1">Jan.</option>
<option value="2">Feb.</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">Aug.</option>
<option value="9">Sept.</option>
<option value="10">Oct.</option>
<option value="11">Nov.</option>
<option value="12">Dec.</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">---</option>
<option value="2013">2013</option>
</select>
"""
))
def test_selectdate_required(self):
class GetNotRequiredDate(Form):
mydate = DateField(widget=SelectDateWidget, required=False)
class GetRequiredDate(Form):
mydate = DateField(widget=SelectDateWidget, required=True)
self.assertFalse(GetNotRequiredDate().fields['mydate'].widget.is_required)
self.assertTrue(GetRequiredDate().fields['mydate'].widget.is_required)
def test_selectdate_empty_label(self):
w = SelectDateWidget(years=('2014',), empty_label='empty_label')
        # Rendering the default state with empty_label set as a string.
self.assertInHTML('<option value="0">empty_label</option>', w.render('mydate', ''), count=3)
w = SelectDateWidget(years=('2014',), empty_label=('empty_year', 'empty_month', 'empty_day'))
# Rendering the default state with empty_label tuple.
self.assertHTMLEqual(
w.render('mydate', ''),
"""
<select name="mydate_month" id="id_mydate_month">
<option value="0">empty_month</option>
<option value="1">January</option>
<option value="2">February</option>
<option value="3">March</option>
<option value="4">April</option>
<option value="5">May</option>
<option value="6">June</option>
<option value="7">July</option>
<option value="8">August</option>
<option value="9">September</option>
<option value="10">October</option>
<option value="11">November</option>
<option value="12">December</option>
</select>
<select name="mydate_day" id="id_mydate_day">
<option value="0">empty_day</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="mydate_year" id="id_mydate_year">
<option value="0">empty_year</option>
<option value="2014">2014</option>
</select>
""",
)
self.assertRaisesMessage(ValueError, 'empty_label list/tuple must have 3 elements.',
SelectDateWidget, years=('2014',), empty_label=('not enough', 'values'))
@override_settings(USE_L10N=True)
@translation.override('nl')
def test_l10n(self):
w = SelectDateWidget(
years=('2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016')
)
self.assertEqual(
w.value_from_datadict({'date_year': '2010', 'date_month': '8', 'date_day': '13'}, {}, 'date'),
'13-08-2010',
)
self.assertHTMLEqual(
w.render('date', '13-08-2010'),
"""
<select name="date_day" id="id_date_day">
<option value="0">---</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
<option value="9">9</option>
<option value="10">10</option>
<option value="11">11</option>
<option value="12">12</option>
<option value="13" selected="selected">13</option>
<option value="14">14</option>
<option value="15">15</option>
<option value="16">16</option>
<option value="17">17</option>
<option value="18">18</option>
<option value="19">19</option>
<option value="20">20</option>
<option value="21">21</option>
<option value="22">22</option>
<option value="23">23</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="26">26</option>
<option value="27">27</option>
<option value="28">28</option>
<option value="29">29</option>
<option value="30">30</option>
<option value="31">31</option>
</select>
<select name="date_month" id="id_date_month">
<option value="0">---</option>
<option value="1">januari</option>
<option value="2">februari</option>
<option value="3">maart</option>
<option value="4">april</option>
<option value="5">mei</option>
<option value="6">juni</option>
<option value="7">juli</option>
<option value="8" selected="selected">augustus</option>
<option value="9">september</option>
<option value="10">oktober</option>
<option value="11">november</option>
<option value="12">december</option>
</select>
<select name="date_year" id="id_date_year">
<option value="0">---</option>
<option value="2007">2007</option>
<option value="2008">2008</option>
<option value="2009">2009</option>
<option value="2010" selected="selected">2010</option>
<option value="2011">2011</option>
<option value="2012">2012</option>
<option value="2013">2013</option>
<option value="2014">2014</option>
<option value="2015">2015</option>
<option value="2016">2016</option>
</select>
""",
)
# Even with an invalid date, the widget should reflect the entered value (#17401).
self.assertEqual(w.render('mydate', '2010-02-30').count('selected="selected"'), 3)
# Years before 1900 should work.
w = SelectDateWidget(years=('1899',))
self.assertEqual(
w.value_from_datadict({'date_year': '1899', 'date_month': '8', 'date_day': '13'}, {}, 'date'),
'13-08-1899',
)
| bsd-3-clause |
mhnatiuk/phd_sociology_of_religion | scrapper/lib/python2.7/site-packages/scrapy/spider.py | 15 | 2307 | """
Base class for Scrapy spiders
See documentation in docs/topics/spiders.rst
"""
from scrapy import log
from scrapy.http import Request
from scrapy.utils.trackref import object_ref
from scrapy.utils.url import url_is_from_spider
from scrapy.utils.deprecate import create_deprecated_class
class Spider(object_ref):
"""Base class for scrapy spiders. All spiders must inherit from this
class.
"""
name = None
def __init__(self, name=None, **kwargs):
if name is not None:
self.name = name
elif not getattr(self, 'name', None):
raise ValueError("%s must have a name" % type(self).__name__)
self.__dict__.update(kwargs)
if not hasattr(self, 'start_urls'):
self.start_urls = []
def log(self, message, level=log.DEBUG, **kw):
"""Log the given messages at the given log level. Always use this
method to send log messages from your spider
"""
log.msg(message, spider=self, level=level, **kw)
def set_crawler(self, crawler):
        assert not hasattr(self, '_crawler'), "Spider already bound to %s" % crawler
self._crawler = crawler
@property
def crawler(self):
        assert hasattr(self, '_crawler'), "Spider not bound to any crawler"
return self._crawler
@property
def settings(self):
return self.crawler.settings
def start_requests(self):
for url in self.start_urls:
yield self.make_requests_from_url(url)
def make_requests_from_url(self, url):
return Request(url, dont_filter=True)
def parse(self, response):
raise NotImplementedError
@classmethod
def handles_request(cls, request):
return url_is_from_spider(request.url, cls)
def __str__(self):
return "<%s %r at 0x%0x>" % (type(self).__name__, self.name, id(self))
__repr__ = __str__
BaseSpider = create_deprecated_class('BaseSpider', Spider)
class ObsoleteClass(object):
def __init__(self, message):
self.message = message
def __getattr__(self, name):
raise AttributeError(self.message)
spiders = ObsoleteClass("""
"from scrapy.spider import spiders" no longer works - use "from scrapy.project import crawler" and then access crawler.spiders attribute"
""")
| gpl-2.0 |
DavidIngraham/ardupilot | libraries/AP_HAL_ChibiOS/hwdef/scripts/STM32F103xB.py | 10 | 2607 | #!/usr/bin/env python
'''
these tables are generated from the STM32 datasheets for the
STM32F103x8
'''
# additional build information for ChibiOS
build = {
"CHIBIOS_STARTUP_MK" : "os/common/startup/ARMCMx/compilers/GCC/mk/startup_stm32f1xx.mk",
"CHIBIOS_PLATFORM_MK" : "os/hal/ports/STM32/STM32F1xx/platform.mk",
"CHPRINTF_USE_FLOAT" : 'no',
"USE_FPU" : 'no'
}
pincount = {
'A': 16,
'B': 16,
'C': 16,
'D': 16,
'E': 16
}
# MCU parameters
mcu = {
# location of MCU serial number
'UDID_START' : 0x1FFFF7E8,
'RAM_MAP' : [
(0x20000000, 20, 1), # main memory, DMA safe
],
'EXPECTED_CLOCK' : 72000000
}
ADC1_map = {
# format is PIN : ADC1_CHAN
"PA0" : 0,
"PA1" : 1,
"PA2" : 2,
"PA3" : 3,
"PA4" : 4,
"PA5" : 5,
"PA6" : 6,
"PA7" : 7,
"PB0" : 8,
"PB1" : 9,
"PC0" : 10,
"PC1" : 11,
"PC2" : 12,
"PC3" : 13,
"PC4" : 14,
"PC5" : 15,
}
DMA_Map = {
# format is (DMA_TABLE, StreamNum, Channel)
"ADC1" : [(1,1,0)],
"TIM1_CH1" : [(1,2,0)],
"TIM1_CH3" : [(1,6,0)],
"TIM1_CH4" : [(1,4,0)],
"TIM1_UP" : [(1,5,0)],
"TIM2_CH1" : [(1,5,0)],
"TIM2_CH2" : [(1,7,0)],
"TIM2_CH3" : [(1,1,0)],
"TIM2_CH4" : [(1,7,0)],
"TIM2_UP" : [(1,2,0)],
"TIM3_CH1" : [(1,6,0)],
"TIM3_CH3" : [(1,2,0)],
"TIM3_CH4" : [(1,3,0)],
"TIM3_UP" : [(1,3,0)],
"TIM4_CH1" : [(1,1,0)],
"TIM4_CH2" : [(1,4,0)],
"TIM4_CH3" : [(1,5,0)],
"TIM4_UP" : [(1,7,0)],
"TIM5_CH1" : [(2,5,0)],
"TIM5_CH2" : [(2,4,0)],
"TIM5_CH3" : [(2,2,0)],
"TIM5_CH4" : [(2,1,0)],
"TIM5_UP" : [(2,2,0)],
"TIM8_CH1" : [(2,3,0)],
"TIM8_CH2" : [(2,5,0)],
"TIM8_CH3" : [(2,1,0)],
"TIM8_CH4" : [(2,2,0)],
"TIM8_UP" : [(2,1,0)],
"TIM6_UP" : [(2,3,0)],
"TIM7_UP" : [(2,4,0)],
"I2C1_RX" : [(1,7,0)],
"I2C1_TX" : [(1,6,0)],
"I2C2_RX" : [(1,5,0)],
"I2C2_TX" : [(1,4,0)],
"SPI1_RX" : [(1,2,0)],
"SPI1_TX" : [(1,3,0)],
"SPI2_RX" : [(1,4,0)],
"SPI2_TX" : [(1,5,0)],
"SPI3_RX" : [(2,1,0)],
"SPI3_TX" : [(2,2,0)],
"UART4_RX" : [(2,3,0)],
"UART4_TX" : [(2,5,0)],
"USART1_RX" : [(1,5,0)],
"USART1_TX" : [(1,4,0)],
"USART2_RX" : [(1,6,0)],
"USART2_TX" : [(1,7,0)],
"USART3_RX" : [(1,3,0)],
"USART3_TX" : [(1,2,0)],
}
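# Hedged helper sketch (not part of the upstream hwdef scripts; the function
# name is an illustrative assumption): resolving the possible
# (controller, stream, channel) tuples for a peripheral request.
def lookup_dma(periph):
    # e.g. lookup_dma("SPI1_TX") -> [(1, 3, 0)]
    return DMA_Map.get(periph, [])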
| gpl-3.0 |
pshen/ansible | lib/ansible/module_utils/pure.py | 71 | 3161 | # -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Simon Dodsley <[email protected]>,2017
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
HAS_PURESTORAGE = True
try:
from purestorage import purestorage
except ImportError:
HAS_PURESTORAGE = False
from os import environ
import platform
VERSION = 1.0
USER_AGENT_BASE = 'Ansible'
def get_system(module):
"""Return System Object or Fail"""
user_agent = '%(base)s %(class)s/%(version)s (%(platform)s)' % {
'base': USER_AGENT_BASE,
'class': __name__,
'version': VERSION,
'platform': platform.platform()
}
array_name = module.params['fa_url']
api = module.params['api_token']
if array_name and api:
system = purestorage.FlashArray(array_name, api_token=api, user_agent=user_agent)
elif environ.get('PUREFA_URL') and environ.get('PUREFA_API'):
system = purestorage.FlashArray(environ.get('PUREFA_URL'), api_token=(environ.get('PUREFA_API')), user_agent=user_agent)
else:
module.fail_json(msg="You must set PUREFA_URL and PUREFA_API environment variables or the fa_url and api_token module arguments")
try:
system.get()
except Exception:
module.fail_json(msg="Pure Storage FlashArray authentication failed. Check your credentials")
return system
def purefa_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
fa_url=dict(),
api_token=dict(no_log=True),
)
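# Hedged usage sketch (an assumption, not part of this module): a module
# built on these helpers would typically construct its spec and fetch the
# array handle like so:
#
#   from ansible.module_utils.basic import AnsibleModule
#
#   argument_spec = purefa_argument_spec()
#   module = AnsibleModule(argument_spec, supports_check_mode=True)
#   array = get_system(module)  # fails the task if authentication fails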
| gpl-3.0 |
CompassionCH/bank-payment | account_payment_partner/models/account_invoice.py | 1 | 5160 | # -*- coding: utf-8 -*-
# Copyright 2014-16 Akretion - Alexis de Lattre <[email protected]>
# Copyright 2014 Serv. Tecnol. Avanzados - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
payment_mode_id = fields.Many2one(
comodel_name='account.payment.mode', string="Payment Mode",
ondelete='restrict',
readonly=True, states={'draft': [('readonly', False)]})
bank_account_required = fields.Boolean(
related='payment_mode_id.payment_method_id.bank_account_required',
readonly=True)
partner_bank_id = fields.Many2one(ondelete='restrict')
@api.onchange('partner_id', 'company_id')
def _onchange_partner_id(self):
res = super(AccountInvoice, self)._onchange_partner_id()
if self.partner_id:
if self.type == 'in_invoice':
pay_mode = self.partner_id.supplier_payment_mode_id
self.payment_mode_id = pay_mode
if (
pay_mode and
pay_mode.payment_type == 'outbound' and
pay_mode.payment_method_id.bank_account_required and
self.commercial_partner_id.bank_ids):
self.partner_bank_id =\
self.commercial_partner_id.bank_ids[0]
elif self.type == 'out_invoice':
# No bank account assignation is done here as this is only
# needed for printing purposes and it can conflict with
# SEPA direct debit payments. Current report prints it.
self.payment_mode_id = self.partner_id.customer_payment_mode_id
else:
self.payment_mode_id = False
if self.type == 'in_invoice':
self.partner_bank_id = False
return res
@api.model
def create(self, vals):
"""Fill the payment_mode_id from the partner if none is provided on
creation, using same method as upstream."""
onchanges = {
'_onchange_partner_id': ['payment_mode_id'],
}
for onchange_method, changed_fields in onchanges.items():
if any(f not in vals for f in changed_fields):
invoice = self.new(vals)
getattr(invoice, onchange_method)()
for field in changed_fields:
if field not in vals and invoice[field]:
vals[field] = invoice._fields[field].convert_to_write(
invoice[field], invoice,
)
return super(AccountInvoice, self).create(vals)
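    # Illustrative sketch (an assumption, not part of this module): creating
    # a supplier invoice without an explicit payment mode backfills it from
    # the partner via the onchange above, e.g.:
    #
    #   inv = env['account.invoice'].create(
    #       {'partner_id': partner.id, 'type': 'in_invoice'})
    #   assert inv.payment_mode_id == partner.supplier_payment_mode_id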
@api.onchange('payment_mode_id')
def payment_mode_id_change(self):
if (
self.payment_mode_id and
self.payment_mode_id.payment_type == 'outbound' and
not self.payment_mode_id.payment_method_id.
bank_account_required):
self.partner_bank_id = False
elif not self.payment_mode_id:
self.partner_bank_id = False
@api.model
def line_get_convert(self, line, part):
"""Copy payment mode from invoice to account move line"""
res = super(AccountInvoice, self).line_get_convert(line, part)
if line.get('type') == 'dest' and line.get('invoice_id'):
invoice = self.browse(line['invoice_id'])
res['payment_mode_id'] = invoice.payment_mode_id.id or False
return res
# I think copying payment mode from invoice to refund by default
# is a good idea because the most common way of "paying" a refund is to
# deduct it on the payment of the next invoice (and OCA/bank-payment
# allows to have negative payment lines since March 2016)
@api.model
def _prepare_refund(
self, invoice, date_invoice=None, date=None, description=None,
journal_id=None):
vals = super(AccountInvoice, self)._prepare_refund(
invoice, date_invoice=date_invoice, date=date,
description=description, journal_id=journal_id)
vals['payment_mode_id'] = invoice.payment_mode_id.id
if invoice.type == 'in_invoice':
vals['partner_bank_id'] = invoice.partner_bank_id.id
return vals
def partner_banks_to_show(self):
self.ensure_one()
if self.partner_bank_id:
return self.partner_bank_id
if self.payment_mode_id.show_bank_account_from_journal:
if self.payment_mode_id.bank_account_link == 'fixed':
return self.payment_mode_id.fixed_journal_id.bank_account_id
else:
return self.payment_mode_id.variable_journal_ids.mapped(
'bank_account_id')
if self.payment_mode_id.payment_method_id.code == \
'sepa_direct_debit': # pragma: no cover
return (self.mandate_id.partner_bank_id or
self.partner_id.valid_mandate_id.partner_bank_id)
# Return this as empty recordset
return self.partner_bank_id
| agpl-3.0 |
dharmabumstead/ansible | test/units/plugins/action/test_raw.py | 44 | 3774 | # (c) 2016, Saran Ahluwalia <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleActionFail
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
from ansible.playbook.task import Task
class TestCopyResultExclude(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
    # The current behavior of the raw action with regard to executable is currently in question;
    # test_raw_executable_is_not_empty_string verifies the current behavior (whether it is desired or not).
# Please refer to the following for context:
# Issue: https://github.com/ansible/ansible/issues/16054
# PR: https://github.com/ansible/ansible/pull/16085
def test_raw_executable_is_not_empty_string(self):
play_context = Mock()
task = MagicMock(Task)
task.async_val = False
connection = Mock()
task.args = {'_raw_params': 'Args1'}
play_context.check_mode = False
self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()
self.mock_am.run()
self.mock_am._low_level_execute_command.assert_called_with('Args1', executable=False)
def test_raw_check_mode_is_True(self):
play_context = Mock()
task = MagicMock(Task)
task.async_val = False
connection = Mock()
task.args = {'_raw_params': 'Args1'}
play_context.check_mode = True
try:
self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
except AnsibleActionFail:
pass
def test_raw_test_environment_is_None(self):
play_context = Mock()
task = MagicMock(Task)
task.async_val = False
connection = Mock()
task.args = {'_raw_params': 'Args1'}
task.environment = None
play_context.check_mode = False
self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()
self.assertEqual(task.environment, None)
def test_raw_task_vars_is_not_None(self):
play_context = Mock()
task = MagicMock(Task)
task.async_val = False
connection = Mock()
task.args = {'_raw_params': 'Args1'}
task.environment = None
play_context.check_mode = False
self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()
self.mock_am.run(task_vars={'a': 'b'})
self.assertEqual(task.environment, None)
| gpl-3.0 |
stevec7/ratking | ratking/engine.py | 2 | 3052 | import ast
import ConfigParser
import glob
import grp
import importlib
import multiprocessing
import os
import sys
from drop_privileges import drop_privileges
from jobhandler import JobCtl
from pwd import getpwnam
class SchedCtl(object):
def __init__(self, sched, config, logging):
self.sched = sched
self.config = config
self.logging = logging
# create an object to link to the job control class. only really used by this class to import
# jobs in the $RATKINGROOT/etc/jobs.d directory
self.job_control_instance = JobCtl(self.sched, self.config, self.logging)
def check_sched(self):
"""Checks to see if scheduler is running"""
if self.sched.running is True:
return True, "Scheduler is running."
else:
return False, "Scheduler is stopped."
def import_jobs(self):
"""read jobs from persistent directory, specified in the config file, under option job_dir"""
for infile in glob.glob( os.path.join(self.config.get('main', 'job_dir'), '*.conf') ):
self.logging.info("Trying to import jobfile: %s", infile)
try:
self.job_control_instance.add_job(infile, 'initial_startup', 'initial_startup')
except RatkingException as error:
print "RatkingException: Error adding job, jobfile: %s. " % infile
pass
except ConfigParser.ParsingError as error:
self.logging.error("ConfigParser.ParsingError: %s. ", error)
pass
def initialize(self):
"""Starts the scheduler for the first time. Only to be used in ratkingd daemon"""
self.sched.start()
return True
def start_sched(self, user):
"""Start the AP Scheduler. Return 'True' if success."""
if user != 'root':
return False, "Only root can stop/start scheduling."
if self.sched.running is True:
return False, "Scheduler already running."
else:
try:
self.sched.start()
            except AttributeError as e:
raise RatkingException("Error starting scheduling: %s" % e)
return True, "Scheduler started."
def stop_sched(self, user):
"""Stop the AP Scheduler. Return 'True' if success."""
if user != 'root':
return False, "Only root can stop/start scheduling."
if self.sched.running is False:
return False, "Scheduler is not running."
else:
try:
self.sched.shutdown()
            except AttributeError as e:
                raise RatkingException("Error stopping scheduling: %s" % e)
return True, "Ratkingd job scheduling has been stopped."
class RatkingException(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return repr(self.message)
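# Hedged usage sketch (not part of the module); the scheduler, config and
# logger objects are illustrative assumptions:
#
#   from apscheduler.scheduler import Scheduler
#
#   sched_ctl = SchedCtl(Scheduler(), config, logging)
#   sched_ctl.initialize()   # start APScheduler inside the daemon
#   sched_ctl.import_jobs()  # load *.conf job files from 'job_dir'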
| mit |
franek/weboob | modules/ing/pages/transfer.py | 1 | 9640 | # -*- coding: utf-8 -*-
# Copyright(C) 2009-2011 Romain Bignon, Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
from weboob.tools.captcha.virtkeyboard import VirtKeyboardError
from weboob.capabilities.bank import Recipient, AccountNotFound, Transfer
from weboob.tools.browser import BasePage, BrokenPageError
from weboob.tools.mech import ClientForm
from .login import INGVirtKeyboard
from logging import error
__all__ = ['TransferPage']
class TransferPage(BasePage):
def on_loaded(self):
pass
def get_recipients(self):
# First, internals recipients
table = self.document.xpath('//table[@id="transfer_form:receiptAccount"]')
for tr in table[0].xpath('tbody/tr'):
tds = tr.xpath('td')
id = tds[0].xpath('input')[0].attrib['value']
name = tds[0].xpath('label')[0].text
name += u" " + tds[1].xpath('label')[0].text.replace('\n', '')
name += u" " + tds[2].xpath('label')[0].text.replace('\n', '')
recipient = Recipient()
recipient.id = id
recipient.label = name
recipient._type = "int"
yield recipient
# Second, externals recipients
select = self.document.xpath('//select[@id="transfer_form:externalAccounts"]')
if len(select) > 0:
recipients = select[0].xpath('option')
recipients.pop(0)
for option in recipients:
recipient = Recipient()
recipient.id = option.attrib['value']
recipient.label = option.text
recipient._type = "ext"
yield recipient
def ischecked(self, account):
id = account.id
# remove prefix (CC-, LA-, ...)
if "-" in id:
id = id.split('-')[1]
option = self.document.xpath('//input[@value="%s"]' % id)
if len(option) == 0:
raise AccountNotFound()
else:
option = option[0]
try:
if option.attrib["checked"] == "checked":
return True
else:
return False
        except KeyError:
return False
def transfer(self, recipient, amount, reason):
self.browser.select_form("transfer_form")
self.browser.set_all_readonly(False)
for a in self.browser.controls[:]:
#for label in a.get_labels():
if "transfer_form:_link_hidden_" in str(a) or "transfer_form:j_idcl" in str(a):
self.browser.controls.remove(a)
if "transfer_form:valide" in str(a):
self.browser.controls.remove(a)
self.browser.controls.append(ClientForm.TextControl('text',
'AJAXREQUEST', {'value': "_viewRoot"}))
self.browser.controls.append(ClientForm.TextControl('text',
'AJAX:EVENTS_COUNT', {'value': "1"}))
self.browser['transfer_form:transferMotive'] = reason
self.browser.controls.append(ClientForm.TextControl('text', 'transfer_form:valide', {'value': "transfer_form:valide"}))
self.browser['transfer_form:validateDoTransfer'] = "needed"
self.browser['transfer_form:transferAmount'] = str(amount)
if recipient._type == "int":
self.browser['transfer_recipient_radio'] = [recipient.id]
else:
self.browser['transfer_form:externalAccounts'] = [recipient.id]
self.browser.submit()
def buildonclick(self, recipient, account):
javax = self.document.xpath('//input[@id="javax.faces.ViewState"]')[0].attrib['value']
if recipient._type == "ext":
select = self.document.xpath('//select[@id="transfer_form:externalAccounts"]')[0]
onclick = select.attrib['onchange']
params = onclick.split(',')[3].split('{')[1]
idparam = params.split("'")[1]
param = params.split("'")[3]
request = self.browser.buildurl('', ("AJAXREQUEST", "transfer_form:transfer_radios_form"),
("transfer_form:generalMessages", ""),
("transfer_issuer_radio", account.id[3:]),
("transfer_form:externalAccounts", recipient.id),
("transfer_date", 0),
("transfer_form:transferAmount", ""),
("transfer_form:transferMotive", ""),
("transfer_form:validateDoTransfer", "needed"),
("transfer_form", "transfer_form"),
("autoScrol", ""),
("javax.faces.ViewState", javax),
(idparam, param))
request = request[1:] # remove the "?"
return request
elif recipient._type == "int":
for input in self.document.xpath('//input[@value=%s]' % recipient.id):
if input.attrib['name'] == "transfer_recipient_radio":
onclick = input.attrib['onclick']
break
# Get something like transfer_form:issueAccount:0:click
params = onclick.split(',')[3].split('{')[1]
idparam = params.split("'")[1]
param = params.split("'")[3]
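            # Illustrative values (assumed, not captured from the live site):
            # with params == "'transfer_form:issueAccount:0:click':'transfer_form:issueAccount:0:click'}..."
            # the two splits above give
            #   idparam == "transfer_form:issueAccount:0:click"
            #   param   == "transfer_form:issueAccount:0:click"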
request = self.browser.buildurl('', ("AJAXREQUEST", "transfer_form:transfer_radios_form"),
('transfer_issuer_radio', account.id[3:]),
("transfer_recipient_radio", recipient.id),
("transfer_form:externalAccounts", "na"),
("transfer_date", 0),
("transfer_form:transferAmount", ""),
("transfer_form:transferMotive", ""),
("transfer_form:validateDoTransfer", "needed"),
("transfer_form", "transfer_form"),
("autoScroll", ""),
("javax.faces.ViewState", javax),
(idparam, param))
request = request[1:]
return request
class TransferConfirmPage(BasePage):
def on_loaded(self):
pass
def confirm(self, password):
try:
vk = INGVirtKeyboard(self)
except VirtKeyboardError, err:
error("Error: %s" % err)
return
realpasswd = ""
span = self.document.find('//span[@id="digitpadtransfer"]')
i = 0
for font in span.getiterator('font'):
if font.attrib.get('class') == "vide":
realpasswd += password[i]
i += 1
confirmform = None
for form in self.document.xpath('//form'):
try:
if form.attrib['name'][0:4] == "j_id":
confirmform = form
break
            except KeyError:
continue
if confirmform is None:
raise BrokenPageError('Unable to find confirm form')
formname = confirmform.attrib['name']
self.browser.logger.debug('We are looking for : ' + realpasswd)
self.browser.select_form(formname)
self.browser.set_all_readonly(False)
for a in self.browser.controls[:]:
if "_link_hidden_" in str(a) or "j_idcl" in str(a):
self.browser.controls.remove(a)
coordinates = vk.get_string_code(realpasswd)
self.browser.logger.debug("Coordonates: " + coordinates)
self.browser.controls.append(ClientForm.TextControl('text',
'AJAXREQUEST', {'value': '_viewRoot'}))
self.browser.controls.append(ClientForm.TextControl(
'text', '%s:mrgtransfer' % formname,
{'value': '%s:mrgtransfer' % formname}))
self.browser['%s:mrltransfer' % formname] = coordinates
self.browser.submit(nologin=True)
def recap(self):
if len(self.document.xpath('//p[@class="alert alert-success"]')) == 0:
raise BrokenPageError('Unable to find confirmation')
div = self.document.find(
'//div[@class="encadre transfert-validation"]')
transfer = Transfer(0)
transfer.amount = Decimal(FrenchTransaction.clean_amount(
div.xpath('.//label[@id="confirmtransferAmount"]')[0].text))
transfer.origin = div.xpath(
'.//span[@id="confirmfromAccount"]')[0].text
transfer.recipient = div.xpath(
'.//span[@id="confirmtoAccount"]')[0].text
transfer.reason = unicode(
div.xpath('.//span[@id="confirmtransferMotive"]')[0].text)
return transfer
| agpl-3.0 |
FlintHill/SUAS-Competition | env/lib/python2.7/site-packages/setuptools/ssl_support.py | 64 | 8492 | import os
import socket
import atexit
import re
import functools
from setuptools.extern.six.moves import urllib, http_client, map, filter
from pkg_resources import ResolutionError, ExtractionError
try:
import ssl
except ImportError:
ssl = None
__all__ = [
'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
'opener_for'
]
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()
try:
HTTPSHandler = urllib.request.HTTPSHandler
HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
try:
from ssl import CertificateError, match_hostname
except ImportError:
try:
from backports.ssl_match_hostname import CertificateError
from backports.ssl_match_hostname import match_hostname
except ImportError:
CertificateError = None
match_hostname = None
if not CertificateError:
class CertificateError(ValueError):
pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
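    # Hedged illustration of the matcher above (hostnames are examples only):
    #   _dnsname_match('*.example.com', 'www.example.com')    -> match object
    #   _dnsname_match('*.example.com', 'a.b.example.com')    -> None ('*'
    #       spans only a single dotless label)
    #   _dnsname_match('www*.example.com', 'www1.example.com') -> match object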
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
"""Simple verifying handler: no auth, subclasses, timeouts, etc."""
def __init__(self, ca_bundle):
self.ca_bundle = ca_bundle
HTTPSHandler.__init__(self)
def https_open(self, req):
return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
)
class VerifyingHTTPSConn(HTTPSConnection):
"""Simple verifying connection: no auth, subclasses, timeouts, etc."""
def __init__(self, host, ca_bundle, **kw):
HTTPSConnection.__init__(self, host, **kw)
self.ca_bundle = ca_bundle
def connect(self):
sock = socket.create_connection(
(self.host, self.port), getattr(self, 'source_address', None)
)
# Handle the socket if a (proxy) tunnel is present
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
self.sock = sock
self._tunnel()
# http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
# change self.host to mean the proxy server host when tunneling is
# being used. Adapt, since we are interested in the destination
# host for the match_hostname() comparison.
actual_host = self._tunnel_host
else:
actual_host = self.host
if hasattr(ssl, 'create_default_context'):
ctx = ssl.create_default_context(cafile=self.ca_bundle)
self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
else:
            # Fallback for Python < 2.7.9 / < 3.4, which lack
            # ssl.create_default_context().
self.sock = ssl.wrap_socket(
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
)
try:
match_hostname(self.sock.getpeercert(), actual_host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
def opener_for(ca_bundle=None):
"""Get a urlopen() replacement that uses ca_bundle for verification"""
return urllib.request.build_opener(
VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
).open
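# Hedged usage sketch (the bundle path is illustrative):
#   urlopen = opener_for('/etc/ssl/certs/ca-certificates.crt')
#   page = urlopen('https://pypi.org/').read()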
# from jaraco.functools
def once(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not hasattr(func, 'always_returns'):
func.always_returns = func(*args, **kwargs)
return func.always_returns
return wrapper
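# Hedged note: the wrapper computes the result on the first call and replays
# it afterwards, ignoring any new arguments, e.g.:
#   @once
#   def answer(): return 42
#   answer(); answer()  # the body runs only once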
@once
def get_win_certfile():
try:
import wincertstore
except ImportError:
return None
class CertFile(wincertstore.CertFile):
def __init__(self):
super(CertFile, self).__init__()
atexit.register(self.close)
def close(self):
try:
super(CertFile, self).close()
except OSError:
pass
_wincerts = CertFile()
_wincerts.addstore('CA')
_wincerts.addstore('ROOT')
return _wincerts.name
def find_ca_bundle():
"""Return an existing CA bundle path, or None"""
extant_cert_paths = filter(os.path.isfile, cert_paths)
return (
get_win_certfile()
or next(extant_cert_paths, None)
or _certifi_where()
)
def _certifi_where():
try:
return __import__('certifi').where()
except (ImportError, ResolutionError, ExtractionError):
pass
| mit |
daonb/Open-Knesset | auxiliary/migrations/0013_auto__add_tagkeyphrase.py | 14 | 7851 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TagKeyphrase'
db.create_table(u'auxiliary_tagkeyphrase', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('tag', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tagging.Tag'])),
('phrase', self.gf('django.db.models.fields.CharField')(max_length=100)),
))
db.send_create_signal(u'auxiliary', ['TagKeyphrase'])
def backwards(self, orm):
# Deleting model 'TagKeyphrase'
db.delete_table(u'auxiliary_tagkeyphrase')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'auxiliary.feedback': {
'Meta': {'object_name': 'Feedback'},
'content': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'suggested_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feedback'", 'null': 'True', 'to': u"orm['auth.User']"}),
'url': ('django.db.models.fields.TextField', [], {}),
'user_agent': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'auxiliary.tagkeyphrase': {
'Meta': {'object_name': 'TagKeyphrase'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phrase': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tagging.Tag']"})
},
u'auxiliary.tagsuggestion': {
'Meta': {'object_name': 'TagSuggestion'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {'unique': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tagsuggestion'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'auxiliary.tagsynonym': {
'Meta': {'object_name': 'TagSynonym'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'synonym_tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'synonym_synonym_tag'", 'unique': 'True', 'to': u"orm['tagging.Tag']"}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'synonym_proper_tag'", 'to': u"orm['tagging.Tag']"})
},
u'auxiliary.tidbit': {
'Meta': {'object_name': 'Tidbit'},
'button_link': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'button_text': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content': ('tinymce.models.HTMLField', [], {}),
'icon': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '20', 'db_index': 'True'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tidbits'", 'null': 'True', 'to': u"orm['auth.User']"}),
'title': ('django.db.models.fields.CharField', [], {'default': "u'Did you know ?'", 'max_length': '40'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'tagging.tag': {
'Meta': {'ordering': "('name',)", 'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
}
}
complete_apps = ['auxiliary'] | bsd-3-clause |
larsbutler/coveragepy | coverage/test_helpers.py | 1 | 9946 | # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Mixin classes to help make good tests."""
import atexit
import collections
import os
import random
import shutil
import sys
import tempfile
import textwrap
from coverage.backunittest import TestCase
from coverage.backward import StringIO, to_bytes
class Tee(object):
"""A file-like that writes to all the file-likes it has."""
def __init__(self, *files):
"""Make a Tee that writes to all the files in `files.`"""
self._files = files
if hasattr(files[0], "encoding"):
self.encoding = files[0].encoding
def write(self, data):
"""Write `data` to all the files."""
for f in self._files:
f.write(data)
def flush(self):
"""Flush the data on all the files."""
for f in self._files:
f.flush()
if 0:
# Use this if you need to use a debugger, though it makes some tests
# fail, I'm not sure why...
def __getattr__(self, name):
return getattr(self._files[0], name)
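# Hedged usage sketch for Tee:
#   buf = StringIO()
#   tee = Tee(sys.stdout, buf)
#   tee.write("hello\n")   # shows up on stdout and in buf.getvalue()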
class ModuleAwareMixin(TestCase):
"""A test case mixin that isolates changes to sys.modules."""
def setUp(self):
super(ModuleAwareMixin, self).setUp()
# Record sys.modules here so we can restore it in cleanup_modules.
self.old_modules = dict(sys.modules)
self.addCleanup(self.cleanup_modules)
def cleanup_modules(self):
"""Remove any new modules imported during the test run.
This lets us import the same source files for more than one test.
"""
for m in [m for m in sys.modules if m not in self.old_modules]:
del sys.modules[m]
class SysPathAwareMixin(TestCase):
"""A test case mixin that isolates changes to sys.path."""
def setUp(self):
super(SysPathAwareMixin, self).setUp()
self.old_syspath = sys.path[:]
self.addCleanup(self.cleanup_syspath)
def cleanup_syspath(self):
"""Restore the original sys.path."""
sys.path = self.old_syspath
class EnvironmentAwareMixin(TestCase):
"""A test case mixin that isolates changes to the environment."""
def setUp(self):
super(EnvironmentAwareMixin, self).setUp()
# Record environment variables that we changed with set_environ.
self.environ_undos = {}
self.addCleanup(self.cleanup_environ)
def set_environ(self, name, value):
"""Set an environment variable `name` to be `value`.
The environment variable is set, and record is kept that it was set,
so that `cleanup_environ` can restore its original value.
"""
if name not in self.environ_undos:
self.environ_undos[name] = os.environ.get(name)
os.environ[name] = value
def cleanup_environ(self):
"""Undo all the changes made by `set_environ`."""
for name, value in self.environ_undos.items():
if value is None:
del os.environ[name]
else:
os.environ[name] = value
class StdStreamCapturingMixin(TestCase):
"""A test case mixin that captures stdout and stderr."""
def setUp(self):
super(StdStreamCapturingMixin, self).setUp()
# Capture stdout and stderr so we can examine them in tests.
# nose keeps stdout from littering the screen, so we can safely Tee it,
# but it doesn't capture stderr, so we don't want to Tee stderr to the
# real stderr, since it will interfere with our nice field of dots.
self.old_stdout = sys.stdout
self.captured_stdout = StringIO()
sys.stdout = Tee(sys.stdout, self.captured_stdout)
self.old_stderr = sys.stderr
self.captured_stderr = StringIO()
sys.stderr = self.captured_stderr
self.addCleanup(self.cleanup_std_streams)
def cleanup_std_streams(self):
"""Restore stdout and stderr."""
sys.stdout = self.old_stdout
sys.stderr = self.old_stderr
def stdout(self):
"""Return the data written to stdout during the test."""
return self.captured_stdout.getvalue()
def stderr(self):
"""Return the data written to stderr during the test."""
return self.captured_stderr.getvalue()
class TempDirMixin(SysPathAwareMixin, ModuleAwareMixin, TestCase):
"""A test case mixin that creates a temp directory and files in it.
Includes SysPathAwareMixin and ModuleAwareMixin, because making and using
temp dirs like this will also need that kind of isolation.
"""
# Our own setting: most of these tests run in their own temp directory.
# Set this to False in your subclass if you don't want a temp directory
# created.
run_in_temp_dir = True
# Set this if you aren't creating any files with make_file, but still want
# the temp directory. This will stop the test behavior checker from
# complaining.
no_files_in_temp_dir = False
def setUp(self):
super(TempDirMixin, self).setUp()
if self.run_in_temp_dir:
# Create a temporary directory.
self.temp_dir = self.make_temp_dir("test_cover")
self.chdir(self.temp_dir)
# Modules should be importable from this temp directory. We don't
# use '' because we make lots of different temp directories and
# nose's caching importer can get confused. The full path prevents
# problems.
sys.path.insert(0, os.getcwd())
class_behavior = self.class_behavior()
class_behavior.tests += 1
class_behavior.temp_dir = self.run_in_temp_dir
class_behavior.no_files_ok = self.no_files_in_temp_dir
self.addCleanup(self.check_behavior)
def make_temp_dir(self, slug="test_cover"):
"""Make a temp directory that is cleaned up when the test is done."""
name = "%s_%08d" % (slug, random.randint(0, 99999999))
temp_dir = os.path.join(tempfile.gettempdir(), name)
os.makedirs(temp_dir)
self.addCleanup(shutil.rmtree, temp_dir)
return temp_dir
def chdir(self, new_dir):
"""Change directory, and change back when the test is done."""
old_dir = os.getcwd()
os.chdir(new_dir)
self.addCleanup(os.chdir, old_dir)
def check_behavior(self):
"""Check that we did the right things."""
class_behavior = self.class_behavior()
if class_behavior.test_method_made_any_files:
class_behavior.tests_making_files += 1
def make_file(self, filename, text="", newline=None):
"""Create a file for testing.
`filename` is the relative path to the file, including directories if
desired, which will be created if need be.
`text` is the content to create in the file, a native string (bytes in
Python 2, unicode in Python 3).
If `newline` is provided, it is a string that will be used as the line
endings in the created file, otherwise the line endings are as provided
in `text`.
Returns `filename`.
"""
# Tests that call `make_file` should be run in a temp environment.
assert self.run_in_temp_dir
self.class_behavior().test_method_made_any_files = True
text = textwrap.dedent(text)
if newline:
text = text.replace("\n", newline)
# Make sure the directories are available.
dirs, _ = os.path.split(filename)
if dirs and not os.path.exists(dirs):
os.makedirs(dirs)
# Create the file.
with open(filename, 'wb') as f:
f.write(to_bytes(text))
return filename
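    # Hedged usage sketch for make_file inside a TempDirMixin-based test
    # (paths and contents are illustrative):
    #   self.make_file("pkg/__init__.py")
    #   self.make_file("pkg/mod.py", "x = 1\n")
    #   self.make_file("crlf.txt", "a\nb\n", newline="\r\n")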
# We run some tests in temporary directories, because they may need to make
# files for the tests. But this is expensive, so we can change per-class
# whether a temp dir is used or not. It's easy to forget to set that
# option properly, so we track information about what the tests did, and
# then report at the end of the process on test classes that were set
# wrong.
class ClassBehavior(object):
"""A value object to store per-class."""
def __init__(self):
self.tests = 0
self.skipped = 0
self.temp_dir = True
self.no_files_ok = False
self.tests_making_files = 0
self.test_method_made_any_files = False
# Map from class to info about how it ran.
class_behaviors = collections.defaultdict(ClassBehavior)
@classmethod
def report_on_class_behavior(cls):
"""Called at process exit to report on class behavior."""
for test_class, behavior in cls.class_behaviors.items():
bad = ""
if behavior.tests <= behavior.skipped:
bad = ""
elif behavior.temp_dir and behavior.tests_making_files == 0:
if not behavior.no_files_ok:
bad = "Inefficient"
elif not behavior.temp_dir and behavior.tests_making_files > 0:
bad = "Unsafe"
if bad:
if behavior.temp_dir:
where = "in a temp directory"
else:
where = "without a temp directory"
print(
"%s: %s ran %d tests, %d made files %s" % (
bad,
test_class.__name__,
behavior.tests,
behavior.tests_making_files,
where,
)
)
def class_behavior(self):
"""Get the ClassBehavior instance for this test."""
return self.class_behaviors[self.__class__]
# When the process ends, find out about bad classes.
atexit.register(TempDirMixin.report_on_class_behavior)
| apache-2.0 |
jiaphuan/models | research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor.py | 1 | 9271 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Inception Resnet v2 Faster R-CNN implementation.
See "Inception-v4, Inception-ResNet and the Impact of Residual Connections on
Learning" by Szegedy et al. (https://arxiv.org/abs/1602.07261)
as well as
"Speed/accuracy trade-offs for modern convolutional object detectors" by
Huang et al. (https://arxiv.org/abs/1611.10012)
"""
import tensorflow as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch
from nets import inception_resnet_v2
slim = tf.contrib.slim
class FasterRCNNInceptionResnetV2FeatureExtractor(
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor):
"""Faster R-CNN with Inception Resnet v2 feature extractor implementation."""
def __init__(self,
is_training,
first_stage_features_stride,
batch_norm_trainable=False,
reuse_weights=None,
weight_decay=0.0):
"""Constructor.
Args:
is_training: See base class.
first_stage_features_stride: See base class.
batch_norm_trainable: See base class.
reuse_weights: See base class.
weight_decay: See base class.
Raises:
ValueError: If `first_stage_features_stride` is not 8 or 16.
"""
if first_stage_features_stride != 8 and first_stage_features_stride != 16:
raise ValueError('`first_stage_features_stride` must be 8 or 16.')
super(FasterRCNNInceptionResnetV2FeatureExtractor, self).__init__(
is_training, first_stage_features_stride, batch_norm_trainable,
reuse_weights, weight_decay)
def preprocess(self, resized_inputs):
"""Faster R-CNN with Inception Resnet v2 preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: A [batch, height_in, width_in, channels] float32 tensor
representing a batch of images with values between 0 and 255.0.
Returns:
preprocessed_inputs: A [batch, height_out, width_out, channels] float32
tensor representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
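    # Hedged numeric check of the mapping above: 0.0 -> -1.0, 127.5 -> 0.0,
    # 255.0 -> +1.0, so pixel values land in [-1, 1] as documented.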
def _extract_proposal_features(self, preprocessed_inputs, scope):
"""Extracts first stage RPN features.
Extracts features using the first half of the Inception Resnet v2 network.
We construct the network in `align_feature_maps=True` mode, which means
that all VALID paddings in the network are changed to SAME padding so that
the feature maps are aligned.
Args:
preprocessed_inputs: A [batch, height, width, channels] float32 tensor
representing a batch of images.
scope: A scope name.
Returns:
rpn_feature_map: A tensor with shape [batch, height, width, depth]
Raises:
InvalidArgumentError: If the spatial size of `preprocessed_inputs`
(height or width) is less than 33.
ValueError: If the created network is missing the required activation.
"""
if len(preprocessed_inputs.get_shape().as_list()) != 4:
raise ValueError('`preprocessed_inputs` must be 4 dimensional, got a '
'tensor of shape %s' % preprocessed_inputs.get_shape())
with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope(
weight_decay=self._weight_decay)):
      # Batch norm statistics are updated only when self._train_batch_norm
      # is True.
with slim.arg_scope([slim.batch_norm],
is_training=self._train_batch_norm):
with tf.variable_scope('InceptionResnetV2',
reuse=self._reuse_weights) as scope:
rpn_feature_map, _ = (
inception_resnet_v2.inception_resnet_v2_base(
preprocessed_inputs, final_endpoint='PreAuxLogits',
scope=scope, output_stride=self._first_stage_features_stride,
align_feature_maps=True))
return rpn_feature_map
def _extract_box_classifier_features(self, proposal_feature_maps, scope):
"""Extracts second stage box classifier features.
This function reconstructs the "second half" of the Inception ResNet v2
network after the part defined in `_extract_proposal_features`.
Args:
proposal_feature_maps: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, crop_height, crop_width, depth]
representing the feature map cropped to each proposal.
scope: A scope name.
Returns:
proposal_classifier_features: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, height, width, depth]
representing box classifier features for each proposal.
"""
with tf.variable_scope('InceptionResnetV2', reuse=self._reuse_weights):
with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope(
weight_decay=self._weight_decay)):
        # Batch norm statistics are updated only when self._train_batch_norm
        # is True.
with slim.arg_scope([slim.batch_norm],
is_training=self._train_batch_norm):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope('Mixed_7a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(proposal_feature_maps,
256, 1, scope='Conv2d_0a_1x1')
tower_conv_1 = slim.conv2d(
tower_conv, 384, 3, stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1 = slim.conv2d(
proposal_feature_maps, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(
tower_conv1, 288, 3, stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2 = slim.conv2d(
proposal_feature_maps, 256, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(
tower_conv2_1, 320, 3, stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.max_pool2d(
proposal_feature_maps, 3, stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
net = tf.concat(
[tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)
net = slim.repeat(net, 9, inception_resnet_v2.block8, scale=0.20)
net = inception_resnet_v2.block8(net, activation_fn=None)
proposal_classifier_features = slim.conv2d(
net, 1536, 1, scope='Conv2d_7b_1x1')
return proposal_classifier_features
def restore_from_classification_checkpoint_fn(
self,
first_stage_feature_extractor_scope,
second_stage_feature_extractor_scope):
"""Returns a map of variables to load from a foreign checkpoint.
Note that this overrides the default implementation in
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for
InceptionResnetV2 checkpoints.
TODO(jonathanhuang,rathodv): revisit whether it's possible to force the
`Repeat` namescope as created in `_extract_box_classifier_features` to
start counting at 2 (e.g. `Repeat_2`) so that the default restore_fn can
be used.
Args:
first_stage_feature_extractor_scope: A scope name for the first stage
feature extractor.
second_stage_feature_extractor_scope: A scope name for the second stage
feature extractor.
Returns:
A dict mapping variable names (to load from a checkpoint) to variables in
the model graph.
"""
variables_to_restore = {}
for variable in tf.global_variables():
if variable.op.name.startswith(
first_stage_feature_extractor_scope):
var_name = variable.op.name.replace(
first_stage_feature_extractor_scope + '/', '')
variables_to_restore[var_name] = variable
if variable.op.name.startswith(
second_stage_feature_extractor_scope):
var_name = variable.op.name.replace(
second_stage_feature_extractor_scope
+ '/InceptionResnetV2/Repeat', 'InceptionResnetV2/Repeat_2')
var_name = var_name.replace(
second_stage_feature_extractor_scope + '/', '')
variables_to_restore[var_name] = variable
return variables_to_restore
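    # Hedged sketch of the renaming this performs (scope names are examples):
    #   'FirstStageScope/InceptionResnetV2/Conv2d_1a_3x3/weights'
    #       -> key 'InceptionResnetV2/Conv2d_1a_3x3/weights'
    #   'SecondStageScope/InceptionResnetV2/Repeat/block8_1/...'
    #       -> key 'InceptionResnetV2/Repeat_2/block8_1/...'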
| apache-2.0 |
pfi/maf | maf_template.py | 1 | 4169 | #!/usr/bin/env python
# coding: ISO8859-1
#
# Copyright (c) 2013, Preferred Infrastructure, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
maf - a waf extension for automation of parameterized computational experiments
"""
# NOTE: coding ISO8859-1 is necessary for attaching maflib at the end of this
# file.
import os
import os.path
import shutil
import subprocess
import sys
import tarfile
import waflib.Context
import waflib.Logs
TAR_NAME = 'maflib.tar'
NEW_LINE = '#XXX'.encode()
CARRIAGE_RETURN = '#YYY'.encode()
ARCHIVE_BEGIN = '#==>\n'.encode()
ARCHIVE_END = '#<==\n'.encode()
class _Cleaner:
def __init__(self, directory):
self._cwd = os.getcwd()
self._directory = directory
def __enter__(self):
self.clean()
def __exit__(self, exc_type, exc_value, traceback):
os.chdir(self._cwd)
if exc_type:
self.clean()
return False
def clean(self):
try:
path = os.path.join(self._directory, 'maflib')
shutil.rmtree(path)
except OSError:
pass
def _read_archive(filename):
if filename.endswith('.pyc'):
filename = filename[:-1]
with open(filename, 'rb') as f:
while True:
line = f.readline()
if not line:
raise Exception('archive not found')
if line == ARCHIVE_BEGIN:
content = f.readline()
if not content or f.readline() != ARCHIVE_END:
raise Exception('corrupt archive')
break
return content[1:-1].replace(NEW_LINE, '\n'.encode()).replace(
CARRIAGE_RETURN, '\r'.encode())
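# Hedged sketch of the archive layout _read_archive expects at the end of
# this file (payload shown symbolically):
#   ...python source...
#   #==>
#   #<one line of bz2 bytes, with '\n' escaped as #XXX and '\r' as #YYY>
#   #<==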
def unpack_maflib(directory):
with _Cleaner(directory) as c:
content = _read_archive(__file__)
os.makedirs(os.path.join(directory, 'maflib'))
os.chdir(directory)
bz2_name = TAR_NAME + '.bz2'
with open(bz2_name, 'wb') as f:
f.write(content)
try:
t = tarfile.open(bz2_name)
except:
try:
os.system('bunzip2 ' + bz2_name)
t = tarfile.open(TAR_NAME)
except:
raise Exception('Cannot extract maflib. Check that python bz2 module or bunzip2 command is available.')
try:
t.extractall()
finally:
t.close()
try:
os.remove(bz2_name)
os.remove(TAR_NAME)
except:
pass
maflib_path = os.path.abspath(os.getcwd())
return maflib_path
def test_maflib(directory):
try:
os.stat(os.path.join(directory, 'maflib'))
return os.path.abspath(directory)
except OSError:
return None
def find_maflib():
path = waflib.Context.waf_dir
if not test_maflib(path):
unpack_maflib(path)
return path
find_maflib()
import maflib.core
| bsd-2-clause |
roninek/python101 | games_str/pong_str/pong_str4.py | 4 | 3983 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import pygame, sys
from pygame.locals import *
# Set up the variables describing the game window and the game objects
# with their properties (paddles, ball)
# Initialize the pygame module and objects
# initialize the pygame module
pygame.init()
# number of frames per second
FPS = 30
# clock object that lets us track time
fpsClock = pygame.time.Clock()
# width and height of the game window
OKNOGRY_SZER = 800
OKNOGRY_WYS = 400
# prepare the drawing surface, i.e. initialize the game window
OKNOGRY = pygame.display.set_mode((OKNOGRY_SZER, OKNOGRY_WYS), 0, 32)
# game window title
pygame.display.set_caption('Prosty Pong')
# colors used in the game, with their RGB components stored in tuples
LT_BLUE = (230, 255, 255)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
# width, height and position of the paddles
PALETKA_SZER = 100
PALETKA_WYS = 20
# Initialize the PADDLES:
# create a surface for the image, fill it with a color,
# grab the image's rectangular area and set it at its starting position
PALETKA_1_POZ = (350, 360) # initial position of the player's paddle
paletka1_obr = pygame.Surface([PALETKA_SZER, PALETKA_WYS])
paletka1_obr.fill(BLUE)
paletka1_prost = paletka1_obr.get_rect()
paletka1_prost.x = PALETKA_1_POZ[0]
paletka1_prost.y = PALETKA_1_POZ[1]
PALETKA_2_POZ = (350, 20) # initial position of the computer's paddle
paletka2_obr = pygame.Surface([PALETKA_SZER, PALETKA_WYS])
paletka2_obr.fill(RED)
paletka2_prost = paletka2_obr.get_rect()
paletka2_prost.x = PALETKA_2_POZ[0]
paletka2_prost.y = PALETKA_2_POZ[1]
# speed of the computer's (AI - artificial intelligence) paddle
AI_PREDKOSC = 3
# Initialize the BALL
# width, height, horizontal (x) and vertical (y) speed of the BALL
# create a surface for the ball, draw a circle on it, set its starting position
PILKA_SZER = 20
PILKA_WYS = 20
PILKA_PREDKOSC_X = 6
PILKA_PREDKOSC_Y = 6
pilka_obr = pygame.Surface([PILKA_SZER, PILKA_WYS], pygame.SRCALPHA, 32).convert_alpha()
pygame.draw.ellipse(pilka_obr, GREEN, [0, 0, PILKA_SZER, PILKA_WYS])
pilka_prost = pilka_obr.get_rect()
pilka_prost.x = OKNOGRY_SZER/2
pilka_prost.y = OKNOGRY_WYS/2
# Drawing text messages
# set the initial values of the score counters
# create a font object from the given file at the given size
GRACZ_1_PKT = '0'
GRACZ_2_PKT = '0'
fontObj = pygame.font.Font('freesansbold.ttf', 64)
# functions that display the players' scores
# they render a new image with the text, grab its rectangular area,
# position it and draw it in the game window
def drukuj_punkty_p1():
tekst_obr1 = fontObj.render(GRACZ_1_PKT, True, (0,0,0))
tekst_prost1 = tekst_obr1.get_rect()
tekst_prost1.center = (OKNOGRY_SZER/2, OKNOGRY_WYS*0.75)
OKNOGRY.blit(tekst_obr1, tekst_prost1)
def drukuj_punkty_p2():
tekst_obr2 = fontObj.render(GRACZ_2_PKT, True, (0,0,0))
tekst_prost2 = tekst_obr2.get_rect()
tekst_prost2.center = (OKNOGRY_SZER/2, OKNOGRY_WYS/4)
OKNOGRY.blit(tekst_obr2, tekst_prost2)
# main loop of the program
while True:
    # handle events generated by the player
for event in pygame.event.get():
        # catch window close
if event.type == QUIT:
pygame.quit()
sys.exit()
        # catch mouse motion
if event.type == MOUSEMOTION:
            # get the x, y coordinates of the mouse cursor
myszaX, myszaY = event.pos
            # shift of the player's paddle
przesuniecie = myszaX-(PALETKA_SZER/2)
            # if we would go past the game window on the right
if przesuniecie > OKNOGRY_SZER-PALETKA_SZER:
przesuniecie = OKNOGRY_SZER-PALETKA_SZER
            # if we would go past the game window on the left
if przesuniecie < 0:
przesuniecie = 0
paletka1_prost.x = przesuniecie
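    # Hedged sketch of the drawing steps this tutorial stage still omits
    # (later files in the pong_str series presumably add them):
    #   OKNOGRY.fill(LT_BLUE)
    #   OKNOGRY.blit(paletka1_obr, paletka1_prost)
    #   OKNOGRY.blit(paletka2_obr, paletka2_prost)
    #   OKNOGRY.blit(pilka_obr, pilka_prost)
    #   drukuj_punkty_p1()
    #   drukuj_punkty_p2()
    #   pygame.display.update()
    #   fpsClock.tick(FPS)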
| mit |
Inspq/ansible | test/units/executor/test_task_result.py | 104 | 5583 | # (c) 2016, James Cammarata <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.executor.task_result import TaskResult
class TestTaskResult(unittest.TestCase):
def test_task_result_basic(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test loading a result with a dict
tr = TaskResult(mock_host, mock_task, dict())
# test loading a result with a JSON string
with patch('ansible.parsing.dataloader.DataLoader.load') as p:
tr = TaskResult(mock_host, mock_task, '{}')
def test_task_result_is_changed(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no changed in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_changed())
# test with changed in the result
tr = TaskResult(mock_host, mock_task, dict(changed=True))
self.assertTrue(tr.is_changed())
# test with multiple results but none changed
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_changed())
# test with multiple results and one changed
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(changed=False), dict(changed=True), dict(some_key=False)]))
self.assertTrue(tr.is_changed())
def test_task_result_is_skipped(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no skipped in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_skipped())
# test with skipped in the result
tr = TaskResult(mock_host, mock_task, dict(skipped=True))
self.assertTrue(tr.is_skipped())
# test with multiple results but none skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_skipped())
# test with multiple results and one skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=False), dict(skipped=True), dict(some_key=False)]))
self.assertFalse(tr.is_skipped())
# test with multiple results and all skipped
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=True), dict(skipped=True), dict(skipped=True)]))
self.assertTrue(tr.is_skipped())
# test with multiple squashed results (list of strings)
# first with the main result having skipped=False
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=["a", "b", "c"], skipped=False))
self.assertFalse(tr.is_skipped())
# then with the main result having skipped=True
tr = TaskResult(mock_host, mock_task, dict(results=["a", "b", "c"], skipped=True))
self.assertTrue(tr.is_skipped())
def test_task_result_is_unreachable(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no unreachable in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_unreachable())
# test with unreachable in the result
tr = TaskResult(mock_host, mock_task, dict(unreachable=True))
self.assertTrue(tr.is_unreachable())
# test with multiple results but none unreachable
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
self.assertFalse(tr.is_unreachable())
# test with multiple results and one unreachable
mock_task.loop = 'foo'
tr = TaskResult(mock_host, mock_task, dict(results=[dict(unreachable=False), dict(unreachable=True), dict(some_key=False)]))
self.assertTrue(tr.is_unreachable())
def test_task_result_is_failed(self):
mock_host = MagicMock()
mock_task = MagicMock()
# test with no failed in result
tr = TaskResult(mock_host, mock_task, dict())
self.assertFalse(tr.is_failed())
# test failed result with rc values
tr = TaskResult(mock_host, mock_task, dict(rc=0))
self.assertFalse(tr.is_failed())
tr = TaskResult(mock_host, mock_task, dict(rc=1))
self.assertTrue(tr.is_failed())
# test with failed in result
tr = TaskResult(mock_host, mock_task, dict(failed=True))
self.assertTrue(tr.is_failed())
# test with failed_when in result
tr = TaskResult(mock_host, mock_task, dict(failed_when_result=True))
self.assertTrue(tr.is_failed())
| gpl-3.0 |
olsaki/ansible-modules-core | system/authorized_key.py | 55 | 15877 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to add authorized_keys for ssh logins.
(c) 2012, Brad Olson <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: authorized_key
short_description: Adds or removes an SSH authorized key
description:
- Adds or removes an SSH authorized key for a user from a remote host.
version_added: "0.5"
options:
user:
description:
- The username on the remote host whose authorized_keys file will be modified
required: true
default: null
key:
description:
- The SSH public key(s), as a string or (since 1.9) url (https://github.com/username.keys)
required: true
default: null
path:
description:
- Alternate path to the authorized_keys file
required: false
default: "(homedir)+/.ssh/authorized_keys"
version_added: "1.2"
manage_dir:
description:
- Whether this module should manage the directory of the authorized key file. If
set, the module will create the directory, as well as set the owner and permissions
of an existing directory. Be sure to
set C(manage_dir=no) if you are using an alternate directory for
authorized_keys, as set with C(path), since you could lock yourself out of
SSH access. See the example below.
required: false
choices: [ "yes", "no" ]
default: "yes"
version_added: "1.2"
state:
description:
- Whether the given key (with the given key_options) should or should not be in the file
required: false
choices: [ "present", "absent" ]
default: "present"
key_options:
description:
- A string of ssh key options to be prepended to the key in the authorized_keys file
required: false
default: null
version_added: "1.4"
exclusive:
description:
- Whether to remove all other non-specified keys from the authorized_keys file. Multiple keys
can be specified in a single C(key) string value by separating them by newlines.
- This option is not loop aware, so if you use C(with_) , it will be exclusive per iteration
of the loop, if you want multiple keys in the file you need to pass them all to C(key) in a
single batch as mentioned above.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.9"
description:
- "Adds or removes authorized keys for particular user accounts"
author: "Brad Olson (@bradobro)"
'''
EXAMPLES = '''
# Example using key data from a local file on the management machine
- authorized_key: user=charlie key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
# Using github url as key source
- authorized_key: user=charlie key=https://github.com/charlie.keys
# Using alternate directory locations:
- authorized_key: user=charlie
key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
path='/etc/ssh/authorized_keys/charlie'
manage_dir=no
# Using with_file
- name: Set up authorized_keys for the deploy user
authorized_key: user=deploy
key="{{ item }}"
with_file:
- public_keys/doe-jane
- public_keys/doe-john
# Using key_options:
- authorized_key: user=charlie
key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
key_options='no-port-forwarding,from="10.0.1.1"'
# Set up authorized_keys exclusively with one key
- authorized_key: user=root key="{{ item }}" state=present
exclusive=yes
with_file:
- public_keys/doe-jane
'''
# Makes sure the public key line is present or absent in the user's .ssh/authorized_keys.
#
# Arguments
# =========
# user = username
# key = line to add to authorized_keys for user
# path = path to the user's authorized_keys file (default: ~/.ssh/authorized_keys)
# manage_dir = whether to create, and control ownership of the directory (default: true)
# state = absent|present (default: present)
#
# see example in examples/playbooks
import sys
import os
import pwd
import os.path
import tempfile
import re
import shlex
class keydict(dict):
""" a dictionary that maintains the order of keys as they are added """
# http://stackoverflow.com/questions/2328235/pythonextend-the-dict-class
def __init__(self, *args, **kw):
super(keydict,self).__init__(*args, **kw)
self.itemlist = super(keydict,self).keys()
def __setitem__(self, key, value):
self.itemlist.append(key)
super(keydict,self).__setitem__(key, value)
def __iter__(self):
return iter(self.itemlist)
def keys(self):
return self.itemlist
def values(self):
return [self[key] for key in self]
def itervalues(self):
return (self[key] for key in self)
def keyfile(module, user, write=False, path=None, manage_dir=True):
"""
Calculate name of authorized keys file, optionally creating the
directories and file, properly setting permissions.
:param str user: name of user in passwd file
:param bool write: if True, write changes to authorized_keys file (creating directories if needed)
:param str path: if not None, use provided path rather than default of '~user/.ssh/authorized_keys'
:param bool manage_dir: if True, create and set ownership of the parent dir of the authorized_keys file
:return: full path string to authorized_keys for user
"""
if module.check_mode and path is not None:
keysfile = path
return keysfile
try:
user_entry = pwd.getpwnam(user)
except KeyError, e:
if module.check_mode and path is None:
module.fail_json(msg="Either user must exist or you must provide full path to key file in check mode")
module.fail_json(msg="Failed to lookup user %s: %s" % (user, str(e)))
if path is None:
homedir = user_entry.pw_dir
sshdir = os.path.join(homedir, ".ssh")
keysfile = os.path.join(sshdir, "authorized_keys")
else:
sshdir = os.path.dirname(path)
keysfile = path
if not write:
return keysfile
uid = user_entry.pw_uid
gid = user_entry.pw_gid
if manage_dir:
if not os.path.exists(sshdir):
os.mkdir(sshdir, 0700)
if module.selinux_enabled():
module.set_default_selinux_context(sshdir, False)
os.chown(sshdir, uid, gid)
os.chmod(sshdir, 0700)
if not os.path.exists(keysfile):
basedir = os.path.dirname(keysfile)
if not os.path.exists(basedir):
os.makedirs(basedir)
try:
f = open(keysfile, "w") #touches file so we can set ownership and perms
finally:
f.close()
if module.selinux_enabled():
module.set_default_selinux_context(keysfile, False)
try:
os.chown(keysfile, uid, gid)
os.chmod(keysfile, 0600)
except OSError:
pass
return keysfile
def parseoptions(module, options):
'''
reads a string containing ssh-key options
and returns a dictionary of those options
'''
options_dict = keydict() #ordered dict
if options:
try:
# the following regex will split on commas while
# ignoring those commas that fall within quotes
regex = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
parts = regex.split(options)[1:-1]
for part in parts:
if "=" in part:
(key, value) = part.split("=", 1)
options_dict[key] = value
elif part != ",":
options_dict[part] = None
except:
module.fail_json(msg="invalid option string: %s" % options)
return options_dict
def parsekey(module, raw_key):
'''
parses a key, which may or may not contain a list
of ssh-key options at the beginning
'''
VALID_SSH2_KEY_TYPES = [
'ssh-ed25519',
'ecdsa-sha2-nistp256',
'ecdsa-sha2-nistp384',
'ecdsa-sha2-nistp521',
'ssh-dss',
'ssh-rsa',
]
options = None # connection options
key = None # encrypted key string
key_type = None # type of ssh key
type_index = None # index of keytype in key string|list
    # strip YAML escaping of comment characters ('\#' -> '#')
raw_key = raw_key.replace('\#', '#')
# split key safely
lex = shlex.shlex(raw_key)
lex.quotes = []
lex.commenters = '' #keep comment hashes
lex.whitespace_split = True
key_parts = list(lex)
for i in range(0, len(key_parts)):
if key_parts[i] in VALID_SSH2_KEY_TYPES:
type_index = i
key_type = key_parts[i]
break
# check for options
if type_index is None:
return None
elif type_index > 0:
options = " ".join(key_parts[:type_index])
# parse the options (if any)
options = parseoptions(module, options)
# get key after the type index
key = key_parts[(type_index + 1)]
# set comment to everything after the key
if len(key_parts) > (type_index + 1):
comment = " ".join(key_parts[(type_index + 2):])
return (key, key_type, options, comment)
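# Hedged illustration (the key material is fake):
#   parsekey(module, 'from="host1" ssh-rsa AAAAB3Nza... deploy@box')
# returns a tuple of the form
#   ('AAAAB3Nza...', 'ssh-rsa', {'from': '"host1"'}, 'deploy@box')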
def readkeys(module, filename):
if not os.path.isfile(filename):
return {}
keys = {}
f = open(filename)
for line in f.readlines():
key_data = parsekey(module, line)
if key_data:
# use key as identifier
keys[key_data[0]] = key_data
else:
# for an invalid line, just append the line
# to the array so it will be re-output later
keys[line] = line
f.close()
return keys
def writekeys(module, filename, keys):
fd, tmp_path = tempfile.mkstemp('', 'tmp', os.path.dirname(filename))
f = open(tmp_path,"w")
try:
for index, key in keys.items():
try:
(keyhash,type,options,comment) = key
option_str = ""
if options:
option_strings = []
for option_key in options.keys():
if options[option_key]:
option_strings.append("%s=%s" % (option_key, options[option_key]))
else:
option_strings.append("%s" % option_key)
option_str = ",".join(option_strings)
option_str += " "
key_line = "%s%s %s %s\n" % (option_str, type, keyhash, comment)
except:
key_line = key
f.writelines(key_line)
except IOError, e:
module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, str(e)))
f.close()
module.atomic_move(tmp_path, filename)
def enforce_state(module, params):
"""
Add or remove key.
"""
user = params["user"]
key = params["key"]
path = params.get("path", None)
manage_dir = params.get("manage_dir", True)
state = params.get("state", "present")
key_options = params.get("key_options", None)
exclusive = params.get("exclusive", False)
error_msg = "Error getting key from: %s"
# if the key is a url, request it and use it as key source
if key.startswith("http"):
try:
resp, info = fetch_url(module, key)
if info['status'] != 200:
module.fail_json(msg=error_msg % key)
else:
key = resp.read()
except Exception:
module.fail_json(msg=error_msg % key)
# extract individual keys into an array, skipping blank lines and comments
key = [s for s in key.splitlines() if s and not s.startswith('#')]
# check current state -- just get the filename, don't create file
do_write = False
params["keyfile"] = keyfile(module, user, do_write, path, manage_dir)
existing_keys = readkeys(module, params["keyfile"])
# Add a place holder for keys that should exist in the state=present and
# exclusive=true case
keys_to_exist = []
# Check our new keys, if any of them exist we'll continue.
for new_key in key:
parsed_new_key = parsekey(module, new_key)
if not parsed_new_key:
module.fail_json(msg="invalid key specified: %s" % new_key)
if key_options is not None:
parsed_options = parseoptions(module, key_options)
parsed_new_key = (parsed_new_key[0], parsed_new_key[1], parsed_options, parsed_new_key[3])
present = False
matched = False
non_matching_keys = []
if parsed_new_key[0] in existing_keys:
present = True
# Then we check if everything matches, including
# the key type and options. If not, we append this
# existing key to the non-matching list
# We only want it to match everything when the state
# is present
if parsed_new_key != existing_keys[parsed_new_key[0]] and state == "present":
non_matching_keys.append(existing_keys[parsed_new_key[0]])
else:
matched = True
# handle idempotent state=present
if state=="present":
keys_to_exist.append(parsed_new_key[0])
if len(non_matching_keys) > 0:
for non_matching_key in non_matching_keys:
if non_matching_key[0] in existing_keys:
del existing_keys[non_matching_key[0]]
do_write = True
if not matched:
existing_keys[parsed_new_key[0]] = parsed_new_key
do_write = True
elif state=="absent":
if not matched:
continue
del existing_keys[parsed_new_key[0]]
do_write = True
# remove all other keys to honor exclusive
if state == "present" and exclusive:
to_remove = frozenset(existing_keys).difference(keys_to_exist)
for key in to_remove:
del existing_keys[key]
do_write = True
if do_write:
if module.check_mode:
module.exit_json(changed=True)
writekeys(module, keyfile(module, user, do_write, path, manage_dir), existing_keys)
params['changed'] = True
else:
if module.check_mode:
module.exit_json(changed=False)
return params
def main():
module = AnsibleModule(
argument_spec = dict(
user = dict(required=True, type='str'),
key = dict(required=True, type='str'),
path = dict(required=False, type='str'),
manage_dir = dict(required=False, type='bool', default=True),
state = dict(default='present', choices=['absent','present']),
key_options = dict(required=False, type='str'),
unique = dict(default=False, type='bool'),
exclusive = dict(default=False, type='bool'),
),
supports_check_mode=True
)
results = enforce_state(module, module.params)
module.exit_json(**results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 |
DuCorey/bokeh | tests/integration/annotations/test_title.py | 6 | 1584 | from __future__ import absolute_import
from bokeh.io import save
from bokeh.models import Plot, Range1d, LinearAxis, Circle, Column, ColumnDataSource
import pytest
pytestmark = pytest.mark.integration
HEIGHT = 600
WIDTH = 600
@pytest.mark.screenshot
def test_the_default_titles_settings_and_ensure_outside_any_axes(output_file_url, selenium, screenshot):
# Testing title rendering of background and border is covered in the
# label test. The title added to plot as the primary title
# should always be outside axes and other side renderers.
source = ColumnDataSource(data=dict(x=[1, 2], y=[1, 2]))
def make_plot(location, title_align, two_axes=True):
plot = Plot(
plot_width=400, plot_height=200,
x_range=Range1d(0, 2), y_range=Range1d(0, 2),
toolbar_location=None,
title_location=location,
)
plot.title.text = "Title %s - %s" % (location, title_align)
plot.title.align = title_align
plot.add_glyph(source, Circle(x='x', y='y', radius=0.4))
plot.add_layout(LinearAxis(), location)
if two_axes:
plot.add_layout(LinearAxis(), location)
return plot
layout = Column(
        make_plot('above', 'left', two_axes=False),  # Workaround: the top location doesn't handle two axes
make_plot('right', 'right'),
make_plot('below', 'center'),
make_plot('left', 'left')
)
# Save the plot and start the test
save(layout)
selenium.get(output_file_url)
# Take screenshot
screenshot.assert_is_valid()
| bsd-3-clause |
mzdaniel/oh-mainline | vendor/packages/twisted/twisted/test/test_modules.py | 18 | 15268 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for twisted.python.modules, abstract access to imported or importable
objects.
"""
import sys
import itertools
import zipfile
import compileall
import twisted
from twisted.trial.unittest import TestCase
from twisted.python import modules
from twisted.python.filepath import FilePath
from twisted.python.reflect import namedAny
from twisted.test.test_paths import zipit
class PySpaceTestCase(TestCase):
def findByIteration(self, modname, where=modules, importPackages=False):
"""
You don't ever actually want to do this, so it's not in the public API, but
sometimes we want to compare the result of an iterative call with a
lookup call and make sure they're the same for test purposes.
"""
for modinfo in where.walkModules(importPackages=importPackages):
if modinfo.name == modname:
return modinfo
self.fail("Unable to find module %r through iteration." % (modname,))
def replaceSysPath(self, sysPath):
"""
Replace sys.path, for the duration of the test, with the given value.
"""
originalSysPath = sys.path[:]
def cleanUpSysPath():
sys.path[:] = originalSysPath
self.addCleanup(cleanUpSysPath)
sys.path[:] = sysPath
def replaceSysModules(self, sysModules):
"""
Replace sys.modules, for the duration of the test, with the given value.
"""
originalSysModules = sys.modules.copy()
def cleanUpSysModules():
sys.modules.clear()
sys.modules.update(originalSysModules)
self.addCleanup(cleanUpSysModules)
sys.modules.clear()
sys.modules.update(sysModules)
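    # Editor's note: both replace helpers above follow the same save /
    # mutate / addCleanup-restore idiom; a generic (hypothetical) version
    # would be:
    #
    #   def replaceMapping(self, mapping, newContents):
    #       saved = mapping.copy()
    #       def restore():
    #           mapping.clear()
    #           mapping.update(saved)
    #       self.addCleanup(restore)
    #       mapping.clear()
    #       mapping.update(newContents)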
def pathEntryWithOnePackage(self, pkgname="test_package"):
"""
Generate a L{FilePath} with one package, named C{pkgname}, on it, and
return the L{FilePath} of the path entry.
"""
entry = FilePath(self.mktemp())
pkg = entry.child("test_package")
pkg.makedirs()
pkg.child("__init__.py").setContent("")
return entry
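    # The helper above yields a minimal on-disk layout (editor's sketch):
    #
    #   <mktemp dir>/
    #       test_package/
    #           __init__.py    # empty
    #
    # just enough for a PythonPath built on this entry to discover
    # 'test_package' as an importable package.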
class BasicTests(PySpaceTestCase):
def test_unimportablePackageGetItem(self):
"""
If a package has been explicitly forbidden from importing by setting a
C{None} key in sys.modules under its name,
L{modules.PythonPath.__getitem__} should still be able to retrieve an
unloaded L{modules.PythonModule} for that package.
"""
shouldNotLoad = []
path = modules.PythonPath(sysPath=[self.pathEntryWithOnePackage().path],
moduleLoader=shouldNotLoad.append,
importerCache={},
sysPathHooks={},
moduleDict={'test_package': None})
self.assertEquals(shouldNotLoad, [])
self.assertEquals(path['test_package'].isLoaded(), False)
def test_unimportablePackageWalkModules(self):
"""
If a package has been explicitly forbidden from importing by setting a
C{None} key in sys.modules under its name, L{modules.walkModules} should
still be able to retrieve an unloaded L{modules.PythonModule} for that
package.
"""
existentPath = self.pathEntryWithOnePackage()
self.replaceSysPath([existentPath.path])
self.replaceSysModules({"test_package": None})
walked = list(modules.walkModules())
self.assertEquals([m.name for m in walked],
["test_package"])
self.assertEquals(walked[0].isLoaded(), False)
def test_nonexistentPaths(self):
"""
Verify that L{modules.walkModules} ignores entries in sys.path which
do not exist in the filesystem.
"""
existentPath = self.pathEntryWithOnePackage()
nonexistentPath = FilePath(self.mktemp())
self.failIf(nonexistentPath.exists())
self.replaceSysPath([existentPath.path])
expected = [modules.getModule("test_package")]
beforeModules = list(modules.walkModules())
sys.path.append(nonexistentPath.path)
afterModules = list(modules.walkModules())
self.assertEquals(beforeModules, expected)
self.assertEquals(afterModules, expected)
def test_nonDirectoryPaths(self):
"""
Verify that L{modules.walkModules} ignores entries in sys.path which
refer to regular files in the filesystem.
"""
existentPath = self.pathEntryWithOnePackage()
nonDirectoryPath = FilePath(self.mktemp())
self.failIf(nonDirectoryPath.exists())
nonDirectoryPath.setContent("zip file or whatever\n")
self.replaceSysPath([existentPath.path])
beforeModules = list(modules.walkModules())
sys.path.append(nonDirectoryPath.path)
afterModules = list(modules.walkModules())
self.assertEquals(beforeModules, afterModules)
def test_twistedShowsUp(self):
"""
Scrounge around in the top-level module namespace and make sure that
Twisted shows up, and that the module thusly obtained is the same as
the module that we find when we look for it explicitly by name.
"""
self.assertEquals(modules.getModule('twisted'),
self.findByIteration("twisted"))
def test_dottedNames(self):
"""
Verify that the walkModules APIs will give us back subpackages, not just
top-level packages.
"""
self.assertEquals(
modules.getModule('twisted.python'),
self.findByIteration("twisted.python",
where=modules.getModule('twisted')))
def test_onlyTopModules(self):
"""
Verify that the iterModules API will only return top-level modules and
packages, not submodules or subpackages.
"""
for module in modules.iterModules():
self.failIf(
'.' in module.name,
"no nested modules should be returned from iterModules: %r"
% (module.filePath))
def test_loadPackagesAndModules(self):
"""
Verify that we can locate and load packages, modules, submodules, and
subpackages.
"""
for n in ['os',
'twisted',
'twisted.python',
'twisted.python.reflect']:
m = namedAny(n)
self.failUnlessIdentical(
modules.getModule(n).load(),
m)
self.failUnlessIdentical(
self.findByIteration(n).load(),
m)
def test_pathEntriesOnPath(self):
"""
Verify that path entries discovered via module loading are, in fact, on
sys.path somewhere.
"""
for n in ['os',
'twisted',
'twisted.python',
'twisted.python.reflect']:
self.failUnlessIn(
modules.getModule(n).pathEntry.filePath.path,
sys.path)
def test_alwaysPreferPy(self):
"""
Verify that .py files will always be preferred to .pyc files, regardless of
directory listing order.
"""
mypath = FilePath(self.mktemp())
mypath.createDirectory()
pp = modules.PythonPath(sysPath=[mypath.path])
originalSmartPath = pp._smartPath
def _evilSmartPath(pathName):
o = originalSmartPath(pathName)
originalChildren = o.children
def evilChildren():
# normally this order is random; let's make sure it always
# comes up .pyc-first.
x = originalChildren()
x.sort()
x.reverse()
return x
o.children = evilChildren
return o
mypath.child("abcd.py").setContent('\n')
compileall.compile_dir(mypath.path, quiet=True)
# sanity check
self.assertEquals(len(mypath.children()), 2)
pp._smartPath = _evilSmartPath
self.assertEquals(pp['abcd'].filePath,
mypath.child('abcd.py'))
def test_packageMissingPath(self):
"""
A package may delete its __path__ for various reasons;
C{modules.PythonPath} should be able to deal with it.
"""
mypath = FilePath(self.mktemp())
mypath.createDirectory()
pp = modules.PythonPath(sysPath=[mypath.path])
subpath = mypath.child("abcd")
subpath.createDirectory()
subpath.child("__init__.py").setContent('del __path__\n')
sys.path.append(mypath.path)
import abcd
try:
l = list(pp.walkModules())
self.assertEquals(len(l), 1)
self.assertEquals(l[0].name, 'abcd')
finally:
del abcd
del sys.modules['abcd']
sys.path.remove(mypath.path)
class PathModificationTest(PySpaceTestCase):
"""
These tests share setup/cleanup behavior of creating a dummy package and
stuffing some code in it.
"""
_serialnum = itertools.count().next # used to generate serial numbers for
# package names.
def setUp(self):
self.pathExtensionName = self.mktemp()
self.pathExtension = FilePath(self.pathExtensionName)
self.pathExtension.createDirectory()
self.packageName = "pyspacetests%d" % (self._serialnum(),)
self.packagePath = self.pathExtension.child(self.packageName)
self.packagePath.createDirectory()
self.packagePath.child("__init__.py").setContent("")
self.packagePath.child("a.py").setContent("")
self.packagePath.child("b.py").setContent("")
self.packagePath.child("c__init__.py").setContent("")
self.pathSetUp = False
def _setupSysPath(self):
assert not self.pathSetUp
self.pathSetUp = True
sys.path.append(self.pathExtensionName)
def _underUnderPathTest(self, doImport=True):
moddir2 = self.mktemp()
fpmd = FilePath(moddir2)
fpmd.createDirectory()
fpmd.child("foozle.py").setContent("x = 123\n")
self.packagePath.child("__init__.py").setContent(
"__path__.append(%r)\n" % (moddir2,))
# Cut here
self._setupSysPath()
modinfo = modules.getModule(self.packageName)
self.assertEquals(
self.findByIteration(self.packageName+".foozle", modinfo,
importPackages=doImport),
modinfo['foozle'])
self.assertEquals(modinfo['foozle'].load().x, 123)
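    # Schematic of the fixture built above (editor's illustration):
    #
    #   <package>/__init__.py   ->  __path__.append('<moddir2>')
    #   <moddir2>/foozle.py     ->  x = 123
    #
    # so '<package>.foozle' must resolve through the extended __path__ even
    # though foozle.py lives outside the package directory.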
def test_underUnderPathAlreadyImported(self):
"""
Verify that iterModules will honor the __path__ of already-loaded packages.
"""
self._underUnderPathTest()
def test_underUnderPathNotAlreadyImported(self):
"""
Verify that iterModules will honor the __path__ of packages which have
not yet been imported.
"""
self._underUnderPathTest(False)
test_underUnderPathNotAlreadyImported.todo = (
"This may be impossible but it sure would be nice.")
def _listModules(self):
pkginfo = modules.getModule(self.packageName)
nfni = [modinfo.name.split(".")[-1] for modinfo in
pkginfo.iterModules()]
nfni.sort()
self.failUnlessEqual(nfni, ['a', 'b', 'c__init__'])
def test_listingModules(self):
"""
Make sure the module list comes back as we expect from iterModules on a
package, whether zipped or not.
"""
self._setupSysPath()
self._listModules()
def test_listingModulesAlreadyImported(self):
"""
Make sure the module list comes back as we expect from iterModules on a
package, whether zipped or not, even if the package has already been
imported.
"""
self._setupSysPath()
namedAny(self.packageName)
self._listModules()
def tearDown(self):
# Intentionally using 'assert' here, this is not a test assertion, this
# is just an "oh fuck what is going ON" assertion. -glyph
if self.pathSetUp:
HORK = "path cleanup failed: don't be surprised if other tests break"
assert sys.path.pop() is self.pathExtensionName, HORK+", 1"
assert self.pathExtensionName not in sys.path, HORK+", 2"
class RebindingTest(PathModificationTest):
"""
These tests verify that the default path interrogation API works properly
even when sys.path has been rebound to a different object.
"""
def _setupSysPath(self):
assert not self.pathSetUp
self.pathSetUp = True
self.savedSysPath = sys.path
sys.path = sys.path[:]
sys.path.append(self.pathExtensionName)
def tearDown(self):
"""
Clean up sys.path by re-binding our original object.
"""
if self.pathSetUp:
sys.path = self.savedSysPath
class ZipPathModificationTest(PathModificationTest):
def _setupSysPath(self):
assert not self.pathSetUp
zipit(self.pathExtensionName, self.pathExtensionName+'.zip')
self.pathExtensionName += '.zip'
assert zipfile.is_zipfile(self.pathExtensionName)
PathModificationTest._setupSysPath(self)
class PythonPathTestCase(TestCase):
"""
Tests for the class which provides the implementation for all of the
public API of L{twisted.python.modules}, L{PythonPath}.
"""
def test_unhandledImporter(self):
"""
Make sure that the behavior when encountering an unknown importer
type is not catastrophic failure.
"""
class SecretImporter(object):
pass
def hook(name):
return SecretImporter()
syspath = ['example/path']
sysmodules = {}
syshooks = [hook]
syscache = {}
def sysloader(name):
return None
space = modules.PythonPath(
syspath, sysmodules, syshooks, syscache, sysloader)
entries = list(space.iterEntries())
self.assertEquals(len(entries), 1)
self.assertRaises(KeyError, lambda: entries[0]['module'])
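    # Context (editor's sketch): SecretImporter stands in for an arbitrary
    # PEP 302 importer that PythonPath does not know how to introspect. A
    # real sys.path_hooks callable has the same shape -- it either claims
    # the path entry or raises ImportError to decline:
    #
    #   def hook(pathEntry):
    #       if not pathEntry.endswith('.secret'):
    #           raise ImportError(pathEntry)
    #       return SecretImporter()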
def test_inconsistentImporterCache(self):
"""
If the path a module loaded with L{PythonPath.__getitem__} is not
present in the path importer cache, a warning is emitted, but the
L{PythonModule} is returned as usual.
"""
space = modules.PythonPath([], sys.modules, [], {})
thisModule = space[__name__]
warnings = self.flushWarnings([self.test_inconsistentImporterCache])
self.assertEquals(warnings[0]['category'], UserWarning)
self.assertEquals(
warnings[0]['message'],
FilePath(twisted.__file__).parent().dirname() +
" (for module " + __name__ + ") not in path importer cache "
"(PEP 302 violation - check your local configuration).")
self.assertEquals(len(warnings), 1)
self.assertEquals(thisModule.name, __name__)
| agpl-3.0 |
sjotterman/python_koans | python2/koans/about_tuples.py | 73 | 2259 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutTuples(Koan):
def test_creating_a_tuple(self):
count_of_three = (1, 2, 5)
self.assertEqual(__, count_of_three[2])
def test_tuples_are_immutable_so_item_assignment_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three[2] = "three"
except TypeError as ex:
self.assertMatch(__, ex[0])
def test_tuples_are_immutable_so_appending_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three.append("boom")
except Exception as ex:
self.assertEqual(AttributeError, type(ex))
# Note, assertMatch() uses regular expression pattern matching,
# so you don't have to copy the whole message.
self.assertMatch(__, ex[0])
# Tuples are less flexible than lists, but faster.
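# (Editor's aside, illustrative only -- not a koan.) The speed claim can be
# checked from a shell with timeit:
#
#   python -m timeit "(1, 2, 5)"   # tuple literal
#   python -m timeit "[1, 2, 5]"   # list literal
#
# tuple construction is typically the cheaper of the two.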
def test_tuples_can_only_be_changed_through_replacement(self):
count_of_three = (1, 2, 5)
list_count = list(count_of_three)
list_count.append("boom")
count_of_three = tuple(list_count)
self.assertEqual(__, count_of_three)
def test_tuples_of_one_look_peculiar(self):
self.assertEqual(__, (1).__class__)
self.assertEqual(__, (1,).__class__)
self.assertEqual(__, ("Hello comma!", ))
def test_tuple_constructor_can_be_surprising(self):
self.assertEqual(__, tuple("Surprise!"))
def test_creating_empty_tuples(self):
self.assertEqual(__, ())
self.assertEqual(__, tuple()) # Sometimes less confusing
def test_tuples_can_be_embedded(self):
lat = (37, 14, 6, 'N')
lon = (115, 48, 40, 'W')
place = ('Area 51', lat, lon)
self.assertEqual(__, place)
def test_tuples_are_good_for_representing_records(self):
locations = [
("Illuminati HQ", (38, 52, 15.56, 'N'), (77, 3, 21.46, 'W')),
("Stargate B", (41, 10, 43.92, 'N'), (1, 49, 34.29, 'W')),
]
locations.append(
("Cthulhu", (26, 40, 1, 'N'), (70, 45, 7, 'W'))
)
self.assertEqual(__, locations[2][0])
self.assertEqual(__, locations[0][1][2])
| mit |
paolinux79/FTCryPTUploader | FTCryPTUploader/FtpCoord.py | 1 | 1473 | import threading
class FtpCoord:
shutdown = None
lock = None
stats = {}
def __init__(self):
self.shutdown = False
self.lock = threading.Lock()
def kill(self):
print("raising shutdown")
self.shutdown = True
def need_to_stop(self):
return self.shutdown
def update_stats(self, filepath, size, status, elapsed):
with self.lock:
self.stats[filepath] = {'size':size, 'status' : status, 'elapsed' :elapsed}
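    # Usage sketch (editor's addition, hypothetical values): one FtpCoord is
    # shared by all upload threads; each reports per-file outcomes here and
    # the main thread prints the aggregate at the end:
    #
    #   coord = FtpCoord()
    #   coord.update_stats('/tmp/a.bin', 1024, 'xferred', 0.5)
    #   coord.update_stats('/tmp/b.bin', 2048, 'failed', 1.2)
    #   coord.show_stats()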
def show_stats(self):
xferred = 0
resumed = 0
failed = 0
already = 0
elapsed = 0
size = 0
with self.lock:
for k, v in self.stats.items():
if v['status'] == 'xferred':
xferred += 1
elif v['status'] == 'resumed':
resumed += 1
elif v['status'] == 'failed':
print(k)
failed += 1
elif v['status'] == 'already':
already += 1
elapsed += v['elapsed']
size += v['size']
print("xferred: " + str(xferred))
print("resumed: " + str(resumed))
print("failed: " + str(failed))
print("already: " + str(already))
print("elapsed: " + str(elapsed))
print("size: " + str(size))
if size > 0 and elapsed > 0:
print("bandwith: " + str((size/elapsed)/1024) + " KiB/s") | bsd-2-clause |
fgesora/odoo | openerp/addons/base/tests/test_mail_examples.py | 302 | 57129 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
MISC_HTML_SOURCE = """
<font size="2" style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">test1</font>
<div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; font-style: normal; ">
<b>test2</b></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<i>test3</i></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<u>test4</u></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
<strike>test5</strike></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">
<font size="5">test6</font></div><div><ul><li><font color="#1f1f1f" face="monospace" size="2">test7</font></li><li>
<font color="#1f1f1f" face="monospace" size="2">test8</font></li></ul><div><ol><li><font color="#1f1f1f" face="monospace" size="2">test9</font>
</li><li><font color="#1f1f1f" face="monospace" size="2">test10</font></li></ol></div></div>
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><div><div><font color="#1f1f1f" face="monospace" size="2">
test11</font></div></div></div></blockquote><blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;">
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><font color="#1f1f1f" face="monospace" size="2">
test12</font></div><div><font color="#1f1f1f" face="monospace" size="2"><br></font></div></blockquote></blockquote>
<font color="#1f1f1f" face="monospace" size="2"><a href="http://google.com">google</a></font>
<a href="javascript:alert('malicious code')">test link</a>
"""
EDI_LIKE_HTML_SOURCE = """<div style="font-family: 'Lucica Grande', Ubuntu, Arial, Verdana, sans-serif; font-size: 12px; color: rgb(34, 34, 34); background-color: #FFF; ">
<p>Hello ${object.partner_id.name},</p>
<p>A new invoice is available for you: </p>
<p style="border-left: 1px solid #8e0000; margin-left: 30px;">
<strong>REFERENCES</strong><br />
Invoice number: <strong>${object.number}</strong><br />
Invoice total: <strong>${object.amount_total} ${object.currency_id.name}</strong><br />
Invoice date: ${object.date_invoice}<br />
Order reference: ${object.origin}<br />
Your contact: <a href="mailto:${object.user_id.email or ''}?subject=Invoice%20${object.number}">${object.user_id.name}</a>
</p>
<br/>
<p>It is also possible to directly pay with Paypal:</p>
<a style="margin-left: 120px;" href="${object.paypal_url}">
<img class="oe_edi_paypal_button" src="https://www.paypal.com/en_US/i/btn/btn_paynowCC_LG.gif"/>
</a>
<br/>
<p>If you have any question, do not hesitate to contact us.</p>
<p>Thank you for choosing ${object.company_id.name or 'us'}!</p>
<br/>
<br/>
<div style="width: 375px; margin: 0px; padding: 0px; background-color: #8E0000; border-top-left-radius: 5px 5px; border-top-right-radius: 5px 5px; background-repeat: repeat no-repeat;">
<h3 style="margin: 0px; padding: 2px 14px; font-size: 12px; color: #DDD;">
<strong style="text-transform:uppercase;">${object.company_id.name}</strong></h3>
</div>
<div style="width: 347px; margin: 0px; padding: 5px 14px; line-height: 16px; background-color: #F2F2F2;">
<span style="color: #222; margin-bottom: 5px; display: block; ">
${object.company_id.street}<br/>
${object.company_id.street2}<br/>
${object.company_id.zip} ${object.company_id.city}<br/>
${object.company_id.state_id and ('%s, ' % object.company_id.state_id.name) or ''} ${object.company_id.country_id.name or ''}<br/>
</span>
<div style="margin-top: 0px; margin-right: 0px; margin-bottom: 0px; margin-left: 0px; padding-top: 0px; padding-right: 0px; padding-bottom: 0px; padding-left: 0px; ">
Phone: ${object.company_id.phone}
</div>
<div>
Web : <a href="${object.company_id.website}">${object.company_id.website}</a>
</div>
</div>
</div></body></html>"""
OERP_WEBSITE_HTML_1 = """
<div>
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb16" data-snippet-id="colmd">
<h2>OpenERP HR Features</h2>
<h3 class="text-muted">Manage your company most important asset: People</h3>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg">
<h4 class="mt16">Streamline Recruitments</h4>
<p>Post job offers and keep track of each application received. Follow applicants in your recruitment process with the smart kanban view.</p>
<p>Save time by automating some communications with email templates. Resumes are indexed automatically, allowing you to easily find for specific profiles.</p>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/desert_thumb.jpg">
<h4 class="mt16">Enterprise Social Network</h4>
<p>Break down information silos. Share knowledge and best practices amongst all employees. Follow specific people or documents and join groups of interests to share expertise and documents.</p>
<p>Interact with your collegues in real time with live chat.</p>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg">
<h4 class="mt16">Leaves Management</h4>
<p>Keep track of the vacation days accrued by each employee. Employees enter their requests (paid holidays, sick leave, etc), for managers to approve and validate. It's all done in just a few clicks. The agenda of each employee is updated accordingly.</p>
</div>
</div>
</div>
</div>"""
OERP_WEBSITE_HTML_1_IN = [
'Manage your company most important asset: People',
'img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg"',
]
OERP_WEBSITE_HTML_1_OUT = [
'Break down information silos.',
'Keep track of the vacation days accrued by each employee',
'img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg',
]
OERP_WEBSITE_HTML_2 = """
<div class="mt16 cke_widget_editable cke_widget_element oe_editable oe_dirty" data-oe-model="blog.post" data-oe-id="6" data-oe-field="content" data-oe-type="html" data-oe-translate="0" data-oe-expression="blog_post.content" data-cke-widget-data="{}" data-cke-widget-keep-attr="0" data-widget="oeref" contenteditable="true" data-cke-widget-editable="text">
<section class="mt16 mb16" data-snippet-id="text-block">
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb32" data-snippet-id="colmd">
<h2>
OpenERP Project Management
</h2>
<h3 class="text-muted">Infinitely flexible. Incredibly easy to use.</h3>
</div>
<div class="col-md-12 mb16 mt16" data-snippet-id="colmd">
<p>
OpenERP's <b>collaborative and realtime</b> project
management helps your team get work done. Keep
track of everything, from the big picture to the
minute details, from the customer contract to the
billing.
</p><p>
Organize projects around <b>your own processes</b>. Work
on tasks and issues using the kanban view, schedule
tasks using the gantt chart and control deadlines
in the calendar view. Every project may have it's
own stages allowing teams to optimize their job.
</p>
</div>
</div>
</div>
</section>
<section class="" data-snippet-id="image-text">
<div class="container">
<div class="row">
<div class="col-md-6 mt16 mb16" data-snippet-id="colmd">
<img class="img-responsive shadow" src="/website/static/src/img/image_text.jpg">
</div>
<div class="col-md-6 mt32" data-snippet-id="colmd">
<h3>Manage Your Shops</h3>
<p>
OpenERP's Point of Sale introduces a super clean
interface with no installation required that runs
online and offline on modern hardwares.
</p><p>
It's full integration with the company inventory
and accounting, gives you real time statistics and
consolidations amongst all shops without the hassle
of integrating several applications.
</p>
</div>
</div>
</div>
</section>
<section class="" data-snippet-id="text-image">
<div class="container">
<div class="row">
<div class="col-md-6 mt32" data-snippet-id="colmd">
<h3>Enterprise Social Network</h3>
<p>
Make every employee feel more connected and engaged
with twitter-like features for your own company. Follow
people, share best practices, 'like' top ideas, etc.
</p><p>
Connect with experts, follow what interests you, share
documents and promote best practices with OpenERP
Social application. Get work done with effective
collaboration across departments, geographies
and business applications.
</p>
</div>
<div class="col-md-6 mt16 mb16" data-snippet-id="colmd">
<img class="img-responsive shadow" src="/website/static/src/img/text_image.png">
</div>
</div>
</div>
</section><section class="" data-snippet-id="portfolio">
<div class="container">
<div class="row">
<div class="col-md-12 text-center mt16 mb32" data-snippet-id="colmd">
<h2>Our Porfolio</h2>
<h4 class="text-muted">More than 500 successful projects</h4>
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg">
</div>
<div class="col-md-4" data-snippet-id="colmd">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/landscape.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg">
<img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg">
</div>
</div>
</div>
</section>
</div>
"""
OERP_WEBSITE_HTML_2_IN = [
'management helps your team get work done',
]
OERP_WEBSITE_HTML_2_OUT = [
'Make every employee feel more connected',
'img class="img-responsive shadow" src="/website/static/src/img/text_image.png',
]
TEXT_1 = """I contact you about our meeting tomorrow. Here is the schedule I propose:
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?
--
MySignature"""
TEXT_1_IN = ["""I contact you about our meeting tomorrow. Here is the schedule I propose:
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?"""]
TEXT_1_OUT = ["""--
MySignature"""]
TEXT_2 = """Salut Raoul!
Le 28 oct. 2012 à 00:02, Raoul Grosbedon a écrit :
> I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)
Of course. This seems viable.
> 2012/10/27 Bert Tartopoils :
>> blahblahblah (quote)?
>>
>> blahblahblah (quote)
>>
>> Bert TARTOPOILS
>> [email protected]
>>
>
>
> --
> RaoulSignature
Bert TARTOPOILS
[email protected]
"""
TEXT_2_IN = ["Salut Raoul!", "Of course. This seems viable."]
TEXT_2_OUT = ["I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)",
"""> 2012/10/27 Bert Tartopoils :
>> blahblahblah (quote)?
>>
>> blahblahblah (quote)
>>
>> Bert TARTOPOILS
>> [email protected]
>>
>
>
> --
> RaoulSignature"""]
HTML_1 = """<p>I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep)
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?
--
MySignature</p>"""
HTML_1_IN = ["""I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep)
9 AM: brainstorming about our new amazing business app
9.45 AM: summary
10 AM: meeting with Ignasse to present our app
Is everything ok for you ?"""]
HTML_1_OUT = ["""--
MySignature"""]
HTML_2 = """<div>
<font><span>I contact you about our meeting for tomorrow. Here is the schedule I propose:</span></font>
</div>
<div>
<ul>
<li><span>9 AM: brainstorming about our new amazing business app</span></li>
<li><span>9.45 AM: summary</span></li>
<li><span>10 AM: meeting with Fabien to present our app</span></li>
</ul>
</div>
<div>
<font><span>Is everything ok for you ?</span></font>
</div>"""
HTML_2_IN = ["<font><span>I contact you about our meeting for tomorrow. Here is the schedule I propose:</span></font>",
"<li><span>9 AM: brainstorming about our new amazing business app</span></li>",
"<li><span>9.45 AM: summary</span></li>",
"<li><span>10 AM: meeting with Fabien to present our app</span></li>",
"<font><span>Is everything ok for you ?</span></font>"]
HTML_2_OUT = []
HTML_3 = """<div><pre>This is an answer.
Regards,
XXXXXX
----- Mail original -----</pre>
<pre>Hi,
My CRM-related question.
Regards,
XXXX</pre></div>"""
HTML_3_IN = ["""<div><pre>This is an answer.
Regards,
XXXXXX
----- Mail original -----</pre>"""]
HTML_3_OUT = ["Hi,", "My CRM-related question.",
"Regards,"]
HTML_4 = """
<div>
<div>Hi Nicholas,</div>
<br>
<div>I'm free now. 00447710085916.</div>
<br>
<div>Regards,</div>
<div>Nicholas</div>
<br>
<span id="OLK_SRC_BODY_SECTION">
<div style="font-family:Calibri; font-size:11pt; text-align:left; color:black; BORDER-BOTTOM: medium none; BORDER-LEFT: medium none; PADDING-BOTTOM: 0in; PADDING-LEFT: 0in; PADDING-RIGHT: 0in; BORDER-TOP: #b5c4df 1pt solid; BORDER-RIGHT: medium none; PADDING-TOP: 3pt">
<span style="font-weight:bold">From: </span>OpenERP Enterprise <<a href="mailto:[email protected]">[email protected]</a>><br><span style="font-weight:bold">Reply-To: </span><<a href="mailto:[email protected]">[email protected]</a>><br><span style="font-weight:bold">Date: </span>Wed, 17 Apr 2013 13:30:47 +0000<br><span style="font-weight:bold">To: </span>Microsoft Office User <<a href="mailto:[email protected]">[email protected]</a>><br><span style="font-weight:bold">Subject: </span>Re: your OpenERP.com registration<br>
</div>
<br>
<div>
<p>Hello Nicholas Saxlund, </p>
<p>I noticed you recently registered to our OpenERP Online solution. </p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ?
</p>
<p>Best regards, </p>
<pre><a href="http://openerp.com">http://openerp.com</a>
Belgium: +32.81.81.37.00
U.S.: +1 (650) 307-6736
India: +91 (79) 40 500 100
</pre>
</div>
</span>
</div>"""
HTML_5 = """<div><pre>Hi,
I have downloaded OpenERP installer 7.0 and successfully installed the postgresql server and the OpenERP.
I created a database and started to install module by log in as administrator.
However, I was not able to install any module due to "OpenERP Server Error" as shown in the attachement.
Could you please let me know how could I fix this problem?
Regards,
Goh Sin Yih
________________________________
From: OpenERP Enterprise <[email protected]>
To: [email protected]
Sent: Friday, February 8, 2013 12:46 AM
Subject: Feedback From Your OpenERP Trial
Hello Goh Sin Yih,
Thank you for having tested OpenERP Online.
I noticed you started a trial of OpenERP Online (gsy) but you did not decide to keep using it.
So, I just wanted to get in touch with you to get your feedback. Can you tell me what kind of application you were you looking for and why you didn't decide to continue with OpenERP?
Thanks in advance for providing your feedback,
Do not hesitate to contact me if you have any questions,
Thanks,
</pre>"""
GMAIL_1 = """Hello,<div><br></div><div>Ok for me. I am replying directly in gmail, without signature.</div><div><br></div><div>Kind regards,</div><div><br></div><div>Demo.<br><br><div>On Thu, Nov 8, 2012 at 5:29 PM, <span><<a href="mailto:[email protected]">[email protected]</a>></span> wrote:<br><blockquote><div>I contact you about our meeting for tomorrow. Here is the schedule I propose:</div><div><ul><li>9 AM: brainstorming about our new amazing business app</span></li></li>
<li>9.45 AM: summary</li><li>10 AM: meeting with Fabien to present our app</li></ul></div><div>Is everything ok for you ?</div>
<div><p>--<br>Administrator</p></div>
<div><p>Log in our portal at: <a href="http://localhost:8069#action=login&db=mail_1&login=demo">http://localhost:8069#action=login&db=mail_1&login=demo</a></p></div>
</blockquote></div><br></div>"""
GMAIL_1_IN = ['Ok for me. I am replying directly in gmail, without signature.']
GMAIL_1_OUT = ['Administrator', 'Log in our portal at:']
THUNDERBIRD_1 = """<div>On 11/08/2012 05:29 PM,
<a href="mailto:[email protected]">[email protected]</a> wrote:<br></div>
<blockquote>
<div>I contact you about our meeting for tomorrow. Here is the
schedule I propose:</div>
<div>
<ul><li>9 AM: brainstorming about our new amazing business
app</span></li></li>
<li>9.45 AM: summary</li>
<li>10 AM: meeting with Fabien to present our app</li>
</ul></div>
<div>Is everything ok for you ?</div>
<div>
<p>--<br>
Administrator</p>
</div>
<div>
<p>Log in our portal at:
<a href="http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH">http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH</a></p>
</div>
</blockquote>
Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.<br><br>
Did you receive my email about my new laptop, by the way ?<br><br>
Raoul.<br><pre>--
Raoul Grosbedonnée
</pre>"""
THUNDERBIRD_1_IN = ['Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.']
THUNDERBIRD_1_OUT = ['I contact you about our meeting for tomorrow.', 'Raoul Grosbedon']
HOTMAIL_1 = """<div>
<div dir="ltr"><br>
I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly.
<br> <br>Kindest regards,<br>xxx<br>
<div>
<div id="SkyDrivePlaceholder">
</div>
<hr id="stopSpelling">
Subject: Re: your OpenERP.com registration<br>From: [email protected]<br>To: [email protected]<br>Date: Wed, 27 Mar 2013 17:12:12 +0000
<br><br>
Hello xxx,
<br>
I noticed you recently created an OpenERP.com account to access OpenERP Apps.
<br>
You indicated that you wish to use OpenERP in your own company.
We would like to know more about your your business needs and requirements, and see how
we can help you. When would you be available to discuss your project ?<br>
Best regards,<br>
<pre>
<a href="http://openerp.com" target="_blank">http://openerp.com</a>
Belgium: +32.81.81.37.00
U.S.: +1 (650) 307-6736
India: +91 (79) 40 500 100
</pre>
</div>
</div>
</div>"""
HOTMAIL_1_IN = ["I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly."]
HOTMAIL_1_OUT = ["Subject: Re: your OpenERP.com registration", " I noticed you recently created an OpenERP.com account to access OpenERP Apps.",
"We would like to know more about your your business needs and requirements", "Belgium: +32.81.81.37.00"]
MSOFFICE_1 = """
<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.
We are a company of 25 engineers providing product design services to clients.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
I’ll install on a windows server and run a very limited trial to see how it works.
If we adopt OpenERP we will probably move to Linux or look for a hosted SaaS option.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
<br>
I am also evaluating Adempiere and maybe others.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
I expect the trial will take 2-3 months as this is not a high priority for us.
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
Alan
</span>
</p>
<p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span>
</p>
<p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal">
<b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
From:
</span></b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
OpenERP Enterprise [mailto:[email protected]]
<br><b>Sent:</b> Monday, 11 March, 2013 14:47<br><b>To:</b> Alan Widmer<br><b>Subject:</b> Re: your OpenERP.com registration
</span>
</p>
<p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Alan Widmer, </p>
<p></p>
<p>I noticed you recently downloaded OpenERP. </p>
<p></p>
<p>
Uou mentioned you wish to use OpenERP in your own company. Please let me more about your
business needs and requirements? When will you be available to discuss about your project?
</p>
<p></p>
<p>Thanks for your interest in OpenERP, </p>
<p></p>
<p>Feel free to contact me if you have any questions, </p>
<p></p>
<p>Looking forward to hear from you soon. </p>
<p></p>
<pre><p> </p></pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre><a href="http://openerp.com">http://openerp.com</a><p></p></pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
MSOFFICE_1_IN = ['Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.']
MSOFFICE_1_OUT = ['I noticed you recently downloaded OpenERP.', 'Uou mentioned you wish to use OpenERP in your own company.', 'Belgium: +32.81.81.37.00']
MSOFFICE_2 = """
<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Nicolas,</span></p><p></p>
<p></p>
<p class="MsoNormal" style="text-indent:.5in">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">We are currently investigating the possibility of moving away from our current ERP </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Thank You</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Matt</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Raoul Petitpoil</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Poil Industries</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Information Technology</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">920 Super Street</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Sanchez, Pa 17046 USA</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Tel: xxx.xxx</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Fax: xxx.xxx</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Email: </span>
<a href="mailto:[email protected]">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:blue">[email protected]</span>
</a>
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">www.poilindustries.com</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">www.superproducts.com</span></p><p></p>
<p></p>
</div>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal">
<b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">From:</span>
</b>
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif""> OpenERP Enterprise [mailto:[email protected]] <br><b>Sent:</b> Wednesday, April 17, 2013 1:31 PM<br><b>To:</b> Matt Witters<br><b>Subject:</b> Re: your OpenERP.com registration</span></p><p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Raoul Petitpoil, </p>
<p></p>
<p>I noticed you recently downloaded OpenERP. </p>
<p></p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? </p>
<p></p>
<p>Best regards, </p>
<p></p>
<pre> <p> </p>
</pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre> <a href="http://openerp.com">http://openerp.com</a>
<p></p>
</pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
MSOFFICE_2_IN = ['We are currently investigating the possibility']
MSOFFICE_2_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00']
MSOFFICE_3 = """<div>
<div class="WordSection1">
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Hi Nicolas !</span></p><p></p>
<p></p>
<p class="MsoNormal">
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Yes I’d be glad to hear about your offers as we struggle every year with the planning/approving of LOA. </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">I saw your boss yesterday on tv and immediately wanted to test the interface. </span></p><p></p>
<p></p>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<p class="MsoNormal">
<b>
<span lang="NL-BE" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Bien à vous, </span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="NL-BE" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Met vriendelijke groeten, </span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Best regards,</span></b></p><p></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">
</span></b></p><p><b> </b></p><b>
</b>
<p></p>
<p class="MsoNormal">
<b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">R. Petitpoil <br></span>
</b>
<span lang="EN-GB" style="font-size:10.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">Human Resource Manager<b><br><br>Field Resource s.a n.v. <i> <br></i></b>Hermesstraat 6A <br>1930 Zaventem</span>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Tahoma","sans-serif";color:gray"><br></span>
<b>
<span lang="FR" style="font-size:10.0pt;font-family:Wingdings;color:#1F497D">(</span>
</b>
<b>
<span lang="FR" style="font-size:9.0pt;font-family:Wingdings;color:#1F497D"> </span>
</b>
<b>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">xxx.xxx </span>
</b>
<b>
<span lang="EN-GB" style="font-size:9.0pt;font-family:"Trebuchet MS","sans-serif";color:gray"><br></span>
</b>
<b>
<span lang="FR" style="font-size:10.0pt;font-family:"Wingdings 2";color:#1F497D">7</span>
</b>
<b>
<span lang="FR" style="font-size:9.0pt;font-family:"Wingdings 2";color:#1F497D"> </span>
</b>
<b>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Trebuchet MS","sans-serif";color:gray">+32 2 727.05.91<br></span>
</b>
<span lang="EN-GB" style="font-size:24.0pt;font-family:Webdings;color:green">P</span>
<span lang="EN-GB" style="font-size:8.0pt;font-family:"Tahoma","sans-serif";color:green"> <b> </b></span>
<b>
<span lang="EN-GB" style="font-size:9.0pt;font-family:"Trebuchet MS","sans-serif";color:green">Please consider the environment before printing this email.</span>
</b>
<span lang="EN-GB" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:navy"> </span>
<span lang="EN-GB" style="font-family:"Calibri","sans-serif";color:navy">
</span></p><p></p>
<p></p>
</div>
<p class="MsoNormal">
<span lang="EN-US" style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
</span></p><p> </p>
<p></p>
<div>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm">
<p class="MsoNormal">
<b>
<span lang="FR" style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span>
</b>
<span lang="FR" style="font-size:10.0pt;font-family:"Tahoma","sans-serif""> OpenERP Enterprise [mailto:[email protected]] <br><b>Envoyé :</b> jeudi 18 avril 2013 11:31<br><b>À :</b> Paul Richard<br><b>Objet :</b> Re: your OpenERP.com registration</span></p><p></p>
<p></p>
</div>
</div>
<p class="MsoNormal"></p>
<p> </p>
<p>Hello Raoul PETITPOIL, </p>
<p></p>
<p>I noticed you recently registered to our OpenERP Online solution. </p>
<p></p>
<p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? </p>
<p></p>
<p>Best regards, </p>
<p></p>
<pre> <p> </p>
</pre>
<pre>--<p></p></pre>
<pre>Nicolas<p></p></pre>
<pre> <a href="http://openerp.com">http://openerp.com</a>
<p></p>
</pre>
<pre>Belgium: +32.81.81.37.00<p></p></pre>
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
<pre>India: +91 (79) 40 500 100<p></p></pre>
<pre> <p></p></pre>
</div>
</div>"""
MSOFFICE_3_IN = ['I saw your boss yesterday']
MSOFFICE_3_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00']
# ------------------------------------------------------------
# Test cases coming from bugs
# ------------------------------------------------------------
# bug: read more not apparent, strange message in read more span
BUG1 = """<pre>Hi Migration Team,
Paragraph 1, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Paragraph 2, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Paragraph 3, blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
blah blah blah blah blah blah blah blah.
Thanks.
Regards,
--
Olivier Laurent
Migration Manager
OpenERP SA
Chaussée de Namur, 40
B-1367 Gérompont
Tel: +32.81.81.37.00
Web: http://www.openerp.com</pre>"""
BUG_1_IN = [
'Hi Migration Team',
'Paragraph 1'
]
BUG_1_OUT = [
'Olivier Laurent',
'Chaussée de Namur',
'81.81.37.00',
'openerp.com',
]
BUG2 = """
<div>
<br>
<div class="moz-forward-container"><br>
<br>
-------- Original Message --------
<table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Subject:
</th>
<td>Fwd: TR: OpenERP S.A. Payment Reminder</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Date: </th>
<td>Wed, 16 Oct 2013 14:11:13 +0200</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">From: </th>
<td>Christine Herrmann <a class="moz-txt-link-rfc2396E" href="mailto:[email protected]"><[email protected]></a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">To: </th>
<td><a class="moz-txt-link-abbreviated" href="mailto:[email protected]">[email protected]</a></td>
</tr>
</tbody>
</table>
<br>
<br>
<br>
<div class="moz-forward-container"><br>
<br>
-------- Message original --------
<table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Sujet:
</th>
<td>TR: OpenERP S.A. Payment Reminder</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Date :
</th>
<td>Wed, 16 Oct 2013 10:34:45 -0000</td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">De : </th>
<td>Ida Siwatala <a class="moz-txt-link-rfc2396E" href="mailto:[email protected]"><[email protected]></a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Répondre
à : </th>
<td><a class="moz-txt-link-abbreviated" href="mailto:[email protected]">[email protected]</a></td>
</tr>
<tr>
<th nowrap="" valign="BASELINE" align="RIGHT">Pour :
</th>
<td>Christine Herrmann (che) <a class="moz-txt-link-rfc2396E" href="mailto:[email protected]"><[email protected]></a></td>
</tr>
</tbody>
</table>
<br>
<br>
<div>
<div class="WordSection1">
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Bonjour,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Pourriez-vous
me faire un retour sur ce point.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cordialement</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<div>
<div style="border:none;border-top:solid #B5C4DF
1.0pt;padding:3.0pt 0cm 0cm 0cm">
<p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span></b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
Ida Siwatala [<a class="moz-txt-link-freetext" href="mailto:[email protected]">mailto:[email protected]</a>]
<br>
<b>Envoyé :</b> vendredi 4 octobre 2013 20:03<br>
<b>À :</b> 'Followers of
INZO-services-8-all-e-Maxime-Lisbonne-77176-Savigny-le-temple-France'<br>
<b>Objet :</b> RE: OpenERP S.A. Payment Reminder</span></p>
</div>
</div>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Bonsoir,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Je
me permets de revenir vers vous par écrit , car j’ai
fait 2 appels vers votre service en exposant mon
problème, mais je n’ai pas eu de retour.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cela
fait un mois que j’ai fait la souscription de votre
produit, mais je me rends compte qu’il est pas adapté à
ma situation ( fonctionnalité manquante et surtout je
n’ai pas beaucoup de temps à passer à résoudre des
bugs). </span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">C’est
pourquoi , j’ai demandé qu’un accord soit trouvé avec
vous pour annuler le contrat (tout en vous payant le
mois d’utilisation de septembre).</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Pourriez-vous
me faire un retour sur ce point.</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Cordialement,</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">Ida
Siwatala</span></p>
<p class="MsoNormal"><span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D"></span></p>
<p> </p>
<p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">De :</span></b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
<a href="mailto:[email protected]">[email protected]</a>
[<a href="mailto:[email protected]">mailto:[email protected]</a>]
<br>
<b>Envoyé :</b> vendredi 4 octobre 2013 17:41<br>
<b>À :</b> <a href="mailto:[email protected]">[email protected]</a><br>
<b>Objet :</b> OpenERP S.A. Payment Reminder</span></p>
<p> </p>
<div>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Dear
INZO services,</span></p>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Exception
made if there was a mistake of ours, it seems that the
following amount stays unpaid. Please, take
appropriate measures in order to carry out this
payment in the next 8 days. </span></p>
<p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222"></span></p>
<p> </p>
<table class="MsoNormalTable" style="width:100.0%;border:outset 1.5pt" width="100%" border="1" cellpadding="0">
<tbody>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Date de facturation</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Description</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Reference</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Due Date</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Amount (€)</p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal">Lit.</p>
</td>
</tr>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013-09-24</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013/1121</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>Enterprise - Inzo Services
- Juillet 2013</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>2013-09-24</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt">
<p class="MsoNormal"><b>420.0</b></p>
</td>
<td style="padding:.75pt .75pt .75pt .75pt"><br>
</td>
</tr>
<tr>
<td style="padding:.75pt .75pt .75pt .75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
<td style="border:none;padding:.75pt .75pt .75pt
.75pt"><br>
</td>
</tr>
</tbody>
</table>
<p class="MsoNormal" style="text-align:center;background:white" align="center"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Amount
due : 420.00 € </span></p>
<p style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222">Would
your payment have been carried out after this mail was
sent, please ignore this message. Do not hesitate to
contact our accounting department. </span></p>
<p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:"Arial","sans-serif";color:#222222"><br>
Best Regards, <br>
Aurore Lesage <br>
OpenERP<br>
Chaussée de Namur, 40 <br>
B-1367 Grand Rosières <br>
Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01 <br>
E-mail : <a href="mailto:[email protected]">[email protected]</a> <br>
Web: <a href="http://www.openerp.com">http://www.openerp.com</a></span></p>
</div>
</div>
</div>
--<br>
INZO services <small>Sent by <a style="color:inherit" href="http://www.openerp.com">OpenERP
S.A.</a> using <a style="color:inherit" href="https://www.openerp.com/">OpenERP</a>.</small>
<small>Access your messages and documents <a style="color:inherit" href="https://accounts.openerp.com?db=openerp#action=mail.action_mail_redirect&login=che&message_id=5750830">in
OpenERP</a></small> <br>
<pre class="moz-signature" cols="72">--
Christine Herrmann
OpenERP
Chaussée de Namur, 40
B-1367 Grand Rosières
Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01
Web: <a class="moz-txt-link-freetext" href="http://www.openerp.com">http://www.openerp.com</a> </pre>
<br>
</div>
<br>
<br>
</div>
<br>
</div>"""
BUG_2_IN = [
'read more',
'...',
]
BUG_2_OUT = [
'Fwd: TR: OpenERP S.A',
'fait un mois'
]
# BUG 20/08/2014: READ MORE NOT APPEARING
BUG3 = """<div class="oe_msg_body_long" style="/* display: none; */"><p>OpenERP has been upgraded to version 8.0.</p>
<h2>What's new in this upgrade?</h2>
<div class="document">
<ul>
<li><p class="first">New Warehouse Management System:</p>
<blockquote>
<p>Schedule your picking, packing, receptions and internal moves automatically with Odoo using
your own routing rules. Define push and pull rules to organize a warehouse or to manage
product moves between several warehouses. Track in detail all stock moves, not only in your
warehouse but wherever else it's taken as well (customers, suppliers or manufacturing
locations).</p>
</blockquote>
</li>
<li><p class="first">New Product Configurator</p>
</li>
<li><p class="first">Documentation generation from website forum:</p>
<blockquote>
<p>New module to generate a documentation from questions and responses from your forum.
The documentation manager can define a table of content and any user, depending their karma,
can link a question to an entry of this TOC.</p>
</blockquote>
</li>
<li><p class="first">New kanban view of documents (resumes and letters in recruitement, project documents...)</p>
</li>
<li><p class="first">E-Commerce:</p>
<blockquote>
<ul class="simple">
<li>Manage TIN in contact form for B2B.</li>
<li>Dedicated salesteam to easily manage leads and orders.</li>
</ul>
</blockquote>
</li>
<li><p class="first">Better Instant Messaging.</p>
</li>
<li><p class="first">Faster and Improved Search view: Search drawer now appears on top of the results, and is open
by default in reporting views</p>
</li>
<li><p class="first">Improved User Interface:</p>
<blockquote>
<ul class="simple">
<li>Popups has changed to be more responsive on tablets and smartphones.</li>
<li>New Stat Buttons: Forms views have now dynamic buttons showing some statistics abouts linked models.</li>
<li>Color code to check in one look availability of components in an MRP order.</li>
<li>Unified menu bar allows you to switch easily between the frontend (website) and backend</li>
<li>Results panel is now scrollable independently of the menu bars, keeping the navigation,
search bar and view switcher always within reach.</li>
</ul>
</blockquote>
</li>
<li><p class="first">User signature is now in HTML.</p>
</li>
<li><p class="first">New development API.</p>
</li>
<li><p class="first">Remove support for Outlook and Thunderbird plugins</p>
</li>
</ul>
</div>
<p>Enjoy the new OpenERP Online!</p><span class="oe_mail_reduce"><a href="#">read less</a></span></div>"""
BUG_3_IN = [
'read more',
'...',
]
BUG_3_OUT = [
'New kanban view of documents'
]
| agpl-3.0 |
jmighion/ansible | lib/ansible/modules/cloud/univention/udm_user.py | 29 | 21233 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage posix users on a univention corporate server
description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
required: false
description:
- First name. Required if C(state=present).
lastname:
required: false
description:
- Last name. Required if C(state=present).
password:
required: false
default: None
description:
- Password. Required if C(state=present).
birthday:
required: false
default: None
description:
- Birthday
city:
required: false
default: None
description:
- City of the user's business address.
country:
required: false
default: None
description:
- Country of the user's business address.
department_number:
required: false
default: None
description:
- Department number of the user's business address.
aliases: [ departmentNumber ]
description:
required: false
default: None
description:
- Description (not gecos)
display_name:
required: false
default: None
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
required: false
default: ['']
description:
- A list of e-mail addresses.
employee_number:
required: false
default: None
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
required: false
default: None
description:
- Employee type
aliases: [ employeeType ]
gecos:
required: false
default: None
description:
- GECOS
groups:
required: false
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
required: false
default: None
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
required: false
default: None
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
required: false
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
required: false
default: None
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
required: false
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
required: false
default: None
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
required: false
default: None
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
required: false
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
required: false
default: None
description:
- Organisation
override_pw_history:
required: false
default: False
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
required: false
default: False
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
required: false
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
required: false
default: []
description:
- List of telephone numbers.
postcode:
required: false
default: None
description:
- Postal code of the user's business address.
primary_group:
required: false
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
required: false
default: None
description:
- Windows profile directory
pwd_change_next_login:
required: false
default: None
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
required: false
default: None
description:
- Room number of the user's business address.
aliases: [ roomNumber ]
samba_privileges:
required: false
default: []
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
required: false
default: []
description:
- Allow the authentication only on this Microsoft Windows host.
aliases: [ sambaUserWorkstations ]
sambahome:
required: false
default: None
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
required: false
default: None
description:
- Windows logon script.
secretary:
required: false
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
required: false
default: ['']
description:
- Enable user for the following service providers.
shell:
required: false
default: '/bin/bash'
description:
- Login shell
street:
required: false
default: None
description:
- Street of the user's business address.
title:
required: false
default: None
description:
- Title, e.g. C(Prof.).
unixhome:
required: false
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
required: false
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
required: false
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
required: false
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
required: false
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
required: false
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
'''
RETURN = '''# '''
import crypt
from datetime import date, timedelta
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
def main():
expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d")
module = AnsibleModule(
argument_spec = dict(
birthday = dict(default=None,
type='str'),
city = dict(default=None,
type='str'),
country = dict(default=None,
type='str'),
department_number = dict(default=None,
type='str',
aliases=['departmentNumber']),
description = dict(default=None,
type='str'),
display_name = dict(default=None,
type='str',
aliases=['displayName']),
email = dict(default=[''],
type='list'),
employee_number = dict(default=None,
type='str',
aliases=['employeeNumber']),
employee_type = dict(default=None,
type='str',
aliases=['employeeType']),
firstname = dict(default=None,
type='str'),
gecos = dict(default=None,
type='str'),
groups = dict(default=[],
type='list'),
home_share = dict(default=None,
type='str',
aliases=['homeShare']),
home_share_path = dict(default=None,
type='str',
aliases=['homeSharePath']),
home_telephone_number = dict(default=[],
type='list',
aliases=['homeTelephoneNumber']),
homedrive = dict(default=None,
type='str'),
lastname = dict(default=None,
type='str'),
mail_alternative_address= dict(default=[],
type='list',
aliases=['mailAlternativeAddress']),
mail_home_server = dict(default=None,
type='str',
aliases=['mailHomeServer']),
mail_primary_address = dict(default=None,
type='str',
aliases=['mailPrimaryAddress']),
mobile_telephone_number = dict(default=[],
type='list',
aliases=['mobileTelephoneNumber']),
organisation = dict(default=None,
type='str'),
overridePWHistory = dict(default=False,
type='bool',
aliases=['override_pw_history']),
overridePWLength = dict(default=False,
type='bool',
aliases=['override_pw_length']),
pager_telephonenumber = dict(default=[],
type='list',
aliases=['pagerTelephonenumber']),
password = dict(default=None,
type='str',
no_log=True),
phone = dict(default=[],
type='list'),
postcode = dict(default=None,
type='str'),
primary_group = dict(default=None,
type='str',
aliases=['primaryGroup']),
profilepath = dict(default=None,
type='str'),
pwd_change_next_login = dict(default=None,
type='str',
choices=['0', '1'],
aliases=['pwdChangeNextLogin']),
room_number = dict(default=None,
type='str',
aliases=['roomNumber']),
samba_privileges = dict(default=[],
type='list',
aliases=['sambaPrivileges']),
samba_user_workstations = dict(default=[],
type='list',
aliases=['sambaUserWorkstations']),
sambahome = dict(default=None,
type='str'),
scriptpath = dict(default=None,
type='str'),
secretary = dict(default=[],
type='list'),
serviceprovider = dict(default=[''],
type='list'),
shell = dict(default='/bin/bash',
type='str'),
street = dict(default=None,
type='str'),
title = dict(default=None,
type='str'),
unixhome = dict(default=None,
type='str'),
userexpiry = dict(default=expiry,
type='str'),
username = dict(required=True,
aliases=['name'],
type='str'),
position = dict(default='',
type='str'),
update_password = dict(default='always',
choices=['always', 'on_create'],
type='str'),
ou = dict(default='',
type='str'),
subpath = dict(default='cn=users',
type='str'),
state = dict(default='present',
choices=['present', 'absent'],
type='str')
),
supports_check_mode=True,
required_if = ([
('state', 'present', ['firstname', 'lastname', 'password'])
])
)
username = module.params['username']
position = module.params['position']
ou = module.params['ou']
subpath = module.params['subpath']
state = module.params['state']
changed = False
diff = None
users = list(ldap_search(
'(&(objectClass=posixAccount)(uid={}))'.format(username),
attr=['uid']
))
if position != '':
container = position
else:
if ou != '':
ou = 'ou={},'.format(ou)
if subpath != '':
subpath = '{},'.format(subpath)
container = '{}{}{}'.format(subpath, ou, base_dn())
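# The container DN is assembled as '<subpath,><ou,><base dn>', e.g.
# 'cn=users,ou=school,dc=example,dc=com' for the defaults documented above.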
user_dn = 'uid={},{}'.format(username, container)
exists = bool(len(users))
if state == 'present':
try:
if not exists:
obj = umc_module_for_add('users/user', container)
else:
obj = umc_module_for_edit('users/user', user_dn)
if module.params['displayName'] is None:
module.params['displayName'] = '{} {}'.format(
module.params['firstname'],
module.params['lastname']
)
if module.params['unixhome'] is None:
module.params['unixhome'] = '/home/{}'.format(
module.params['username']
)
for k in obj.keys():
if (k != 'password' and
k != 'groups' and
k != 'overridePWHistory' and
k in module.params and
module.params[k] is not None):
obj[k] = module.params[k]
# handle some special values
obj['e-mail'] = module.params['email']
password = module.params['password']
if obj['password'] is None:
obj['password'] = password
if module.params['update_password'] == 'always':
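# UDM stores passwords with a scheme prefix, typically '{crypt}<hash>';
# crypt() with the stored hash as the salt reproduces that hash only when
# the cleartext matches, so a changed password can be detected without
# knowing the old cleartext.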
old_password = obj['password'].split('}', 2)[1]
if crypt.crypt(password, old_password) != old_password:
obj['overridePWHistory'] = module.params['overridePWHistory']
obj['overridePWLength'] = module.params['overridePWLength']
obj['password'] = password
diff = obj.diff()
if exists:
for k in obj.keys():
if obj.hasChanged(k):
changed = True
else:
changed = True
if not module.check_mode:
if not exists:
obj.create()
elif changed:
obj.modify()
except:
module.fail_json(
msg="Creating/editing user {} in {} failed".format(
username,
container
)
)
try:
groups = module.params['groups']
if groups:
filter = '(&(objectClass=posixGroup)(|(cn={})))'.format(
')(cn='.join(groups)
)
group_dns = list(ldap_search(filter, attr=['dn']))
for dn in group_dns:
grp = umc_module_for_edit('groups/group', dn[0])
if user_dn not in grp['users']:
grp['users'].append(user_dn)
if not module.check_mode:
grp.modify()
changed = True
except:
module.fail_json(
msg="Adding groups to user {} failed".format(username)
)
if state == 'absent' and exists:
try:
obj = umc_module_for_edit('users/user', user_dn)
if not module.check_mode:
obj.remove()
changed = True
except:
module.fail_json(
msg="Removing user {} failed".format(username)
)
module.exit_json(
changed=changed,
username=username,
diff=diff,
container=container
)
if __name__ == '__main__':
main()
| gpl-3.0 |
XiaoxiaoLiu/morphology_analysis | bigneuron/reestimate_radius.py | 1 | 1506 | __author__ = 'xiaoxiaol'
# run standardize swc to make sure swc files have a single root, are sorted, and have valid type ids (1~4)
import matplotlib.pyplot as plt
import seaborn as sb
import os
import os.path as path
import numpy as np
import pandas as pd
import platform
import sys
import glob
if (platform.system() == "Linux"):
WORK_PATH = "/local1/xiaoxiaol/work"
else:
WORK_PATH = "/Users/xiaoxiaoliu/work"
p = WORK_PATH + '/src/morphology_analysis'
sys.path.append(p)
import bigneuron.recon_prescreening as rp
import bigneuron.plot_distances as plt_dist
import blast_neuron.blast_neuron_comp as bn
### main
data_DIR = "/data/mat/xiaoxiaol/data/big_neuron/silver/0401_gold163_all_soma_sort"
output_dir = data_DIR
#run_consensus(data_DIR, output_dir)
os.system("rm "+data_DIR+"/qsub2/*.qsub")
os.system("rm "+data_DIR+"/qsub2/*.o*")
for item in os.listdir(data_DIR):
folder_name = os.path.join(data_DIR, item)
if os.path.isdir(folder_name):
print folder_name
imagefile = glob.glob(folder_name+'/*.v3dpbd')
imagefile.extend(glob.glob(folder_name+'/*.v3draw'))
files =glob.glob(folder_name+'/*.strict.swc')
if len(files)>0 and len(imagefile)>0:
gs_swc_file =files[0]
if not os.path.exists(gs_swc_file+".out.swc"):
bn.estimate_radius(input_image=imagefile[0], input_swc_path=gs_swc_file,bg_th=40, GEN_QSUB = 0, qsub_script_dir= output_dir+"/qsub2", id=None)
| gpl-3.0 |
siutanwong/scikit-learn | examples/text/document_clustering.py | 230 | 8356 | """
=======================================
Clustering text documents using k-means
=======================================
This is an example showing how scikit-learn can be used to cluster
documents by topic using a bag-of-words approach. This example uses
a scipy.sparse matrix to store the features instead of standard numpy arrays.
Two feature extraction methods can be used in this example:
- TfidfVectorizer uses an in-memory vocabulary (a Python dict) to map the most
frequent words to feature indices and hence compute a word occurrence
frequency (sparse) matrix. The word frequencies are then reweighted using
the Inverse Document Frequency (IDF) vector collected feature-wise over
the corpus.
- HashingVectorizer hashes word occurrences to a fixed dimensional space,
possibly with collisions. The word count vectors are then normalized to
each have l2-norm equal to one (projected to the euclidean unit-ball) which
seems to be important for k-means to work in high dimensional space.
HashingVectorizer does not provide IDF weighting as this is a stateless
model (the fit method does nothing). When IDF weighting is needed it can
be added by pipelining its output to a TfidfTransformer instance.
Two algorithms are demoed: ordinary k-means and its more scalable cousin
minibatch k-means.
Additionally, latent semantic analysis can also be used to reduce dimensionality
and discover latent patterns in the data.
It can be noted that k-means (and minibatch k-means) are very sensitive to
feature scaling and that in this case the IDF weighting helps improve the
quality of the clustering by quite a lot as measured against the "ground truth"
provided by the class label assignments of the 20 newsgroups dataset.
This improvement is not visible in the Silhouette Coefficient, which is small
for both, as this measure seems to suffer from the phenomenon called
"Concentration of Measure" or "Curse of Dimensionality" for high dimensional
datasets such as text data. Other measures such as V-measure and Adjusted Rand
Index are information-theoretic evaluation scores: they are based only on
cluster assignments rather than distances, and hence are not affected by the
curse of dimensionality.
Note: as k-means is optimizing a non-convex objective function, it will likely
end up in a local optimum. Several runs with independent random init might be
necessary to get a good convergence.
"""
# Author: Peter Prettenhofer <[email protected]>
# Lars Buitinck <[email protected]>
# License: BSD 3 clause
from __future__ import print_function
from sklearn.datasets import fetch_20newsgroups
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer
from sklearn import metrics
from sklearn.cluster import KMeans, MiniBatchKMeans
import logging
from optparse import OptionParser
import sys
from time import time
import numpy as np
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# parse commandline arguments
op = OptionParser()
op.add_option("--lsa",
dest="n_components", type="int",
help="Preprocess documents with latent semantic analysis.")
op.add_option("--no-minibatch",
action="store_false", dest="minibatch", default=True,
help="Use ordinary k-means algorithm (in batch mode).")
op.add_option("--no-idf",
action="store_false", dest="use_idf", default=True,
help="Disable Inverse Document Frequency feature weighting.")
op.add_option("--use-hashing",
action="store_true", default=False,
help="Use a hashing feature vectorizer")
op.add_option("--n-features", type=int, default=10000,
help="Maximum number of features (dimensions)"
" to extract from text.")
op.add_option("--verbose",
action="store_true", dest="verbose", default=False,
help="Print progress reports inside k-means algorithm.")
print(__doc__)
op.print_help()
(opts, args) = op.parse_args()
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
###############################################################################
# Load some categories from the training set
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
# Uncomment the following to do the analysis on all the categories
#categories = None
print("Loading 20 newsgroups dataset for categories:")
print(categories)
dataset = fetch_20newsgroups(subset='all', categories=categories,
shuffle=True, random_state=42)
print("%d documents" % len(dataset.data))
print("%d categories" % len(dataset.target_names))
print()
labels = dataset.target
true_k = np.unique(labels).shape[0]
print("Extracting features from the training dataset using a sparse vectorizer")
t0 = time()
if opts.use_hashing:
if opts.use_idf:
# Perform an IDF normalization on the output of HashingVectorizer
hasher = HashingVectorizer(n_features=opts.n_features,
stop_words='english', non_negative=True,
norm=None, binary=False)
vectorizer = make_pipeline(hasher, TfidfTransformer())
else:
vectorizer = HashingVectorizer(n_features=opts.n_features,
stop_words='english',
non_negative=False, norm='l2',
binary=False)
else:
vectorizer = TfidfVectorizer(max_df=0.5, max_features=opts.n_features,
min_df=2, stop_words='english',
use_idf=opts.use_idf)
X = vectorizer.fit_transform(dataset.data)
print("done in %fs" % (time() - t0))
print("n_samples: %d, n_features: %d" % X.shape)
print()
if opts.n_components:
print("Performing dimensionality reduction using LSA")
t0 = time()
# Vectorizer results are normalized, which makes KMeans behave as
# spherical k-means for better results. Since LSA/SVD results are
# not normalized, we have to redo the normalization.
svd = TruncatedSVD(opts.n_components)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
X = lsa.fit_transform(X)
print("done in %fs" % (time() - t0))
explained_variance = svd.explained_variance_ratio_.sum()
print("Explained variance of the SVD step: {}%".format(
int(explained_variance * 100)))
print()
###############################################################################
# Do the actual clustering
if opts.minibatch:
km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1,
init_size=1000, batch_size=1000, verbose=opts.verbose)
else:
km = KMeans(n_clusters=true_k, init='k-means++', max_iter=100, n_init=1,
verbose=opts.verbose)
print("Clustering sparse data with %s" % km)
t0 = time()
km.fit(X)
print("done in %0.3fs" % (time() - t0))
print()
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_))
print("Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_))
print("V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_))
print("Adjusted Rand-Index: %.3f"
% metrics.adjusted_rand_score(labels, km.labels_))
print("Silhouette Coefficient: %0.3f"
% metrics.silhouette_score(X, km.labels_, sample_size=1000))
print()
if not opts.use_hashing:
print("Top terms per cluster:")
if opts.n_components:
original_space_centroids = svd.inverse_transform(km.cluster_centers_)
order_centroids = original_space_centroids.argsort()[:, ::-1]
else:
order_centroids = km.cluster_centers_.argsort()[:, ::-1]
terms = vectorizer.get_feature_names()
for i in range(true_k):
print("Cluster %d:" % i, end='')
for ind in order_centroids[i, :10]:
print(' %s' % terms[ind], end='')
print()
| bsd-3-clause |
COL-IU/XLSearch | xlsearch_train.py | 1 | 5042 | import sys
import pickle
import os
import getopt
from time import ctime
import numpy as np
usage = '''
USAGE: python xlsearch_train.py -l [path to xlsearch library]
-p [parameter file]
-o [output file]'''
(pairs, args) = getopt.getopt(sys.argv[1:], 'l:p:o:')
cmd_arg = dict()
for i in range(len(pairs)):
cmd_arg[pairs[i][0]] = pairs[i][1]
if len(cmd_arg) != 3:
print usage
sys.exit(1)
lib_path = cmd_arg['-l']
param_file = cmd_arg['-p']
output_file = cmd_arg['-o']
sys.path.append(lib_path)
from utility import *
from index import EnumIndexBuilder
from fastareader import FastaReader
print 'XLSearch, version 1.0'
print 'Copyright of School of Informatics and Computing, Indiana University'
print 'Current time %s' % ctime()
print 'Training logistic regression models using authentic true-true PSMs...'
print '\nReading parameters from: %s...' % param_file
[param, mass] = read_param(param_file)
param['ntermxlink'] = False
param['neutral_loss']['h2o_loss']['aa'] = set('DEST')
param['neutral_loss']['nh3_loss']['aa'] = set('KNQR')
param['neutral_loss']['h2o_gain']['aa'] = set()
mass['C'] = 103.009184
print 'Reading parameters done!'
print '\nReading MSMS spectra files from directory: %s...' % param['ms_data']
spec_dict = read_spec(param['ms_data'], param, mass)
pickle.dump(spec_dict, file('spectra.pickle', 'w'))
print 'Total number of spectra: %d' % len(spec_dict)
print 'Reading MSMS spectra files done!'
print '\nDeisotoping MSMS spectra...'
spec_dict = pickle.load(file('spectra.pickle'))
deisotoped = dict()
titles = spec_dict.keys()
for i in range(len(titles)):
title = titles[i]
(one, align) = spec_dict[title].deisotope(mass, 4, 0.02)
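# (assumption) the literal arguments above look like a maximum charge state
# of 4 and an m/z tolerance of 0.02 for the deisotoping step; their exact
# meaning depends on Spectrum.deisotope() in the XLSearch library.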
deisotoped[title] = one
pickle.dump(deisotoped, file('deisotoped.pickle', 'w'))
deisotoped = pickle.load(file('deisotoped.pickle'))
spec_dict = deisotoped
print 'Deisotoping MSMS spectra done!'
print 'Current time %s' % ctime()
print '\nBuilding index for all possible inter-peptide cross-links...'
index = EnumIndexBuilder(param['target_database'], spec_dict, mass, param)
pickle.dump(index, file('index.pickle', 'w'))
index = pickle.load(file('index.pickle'))
print 'Building index done!'
print 'Current time %s' % ctime()
print '\nComputing features for candidate PSMs for query spectra...'
results = []
titles = []
for title in index.search_index.keys():
if len(index.search_index[title]) != 0:
titles.append(title)
length = len(titles)
for i in range(0, length):
print '%d / %d' % (i, length)
sys.stdout.flush()
title = titles[i]
result = get_matches_per_spec(mass, param, index, title)
result = [title, result]
results.append(result)
print 'Computing features done!\n'
print 'Current time: %s' % ctime()
pickle.dump(results, file('results.pickle', 'w'))
results = pickle.load(file('results.pickle'))
print 'Extracting authentic true-true PSMs...'
true_true = get_true_true(results, index, param, mass)
pickle.dump(true_true, file('TT.pickle', 'w'))
print 'Extracting authentic true-true PSMs done!'
print 'Extracting true-false PSMs based on true-true PSMs as seeds...'
true_false = get_true_false(true_true, param, mass)
pickle.dump(true_false, file('TF.pickle', 'w'))
print 'Extracting true-false PSMs done!'
print 'Extracting false-false PSMs based on true-true PSMs as seeds...'
false_false = get_false_false(true_true, param, mass)
pickle.dump(false_false, file('FF.pickle', 'w'))
print 'Extracting false-false PSMs done!'
print 'Computing feature matrix for true-true, true-false, false-false PSMs...'
X_true_true = get_feature_matrix(true_true)
X_true_false = get_feature_matrix(true_false)
X_false_false = get_feature_matrix(false_false)
X_TT_TF = np.concatenate((X_true_true, X_true_false), axis = 0)
y_TT_TF = []
y_TT_TF.extend([1.0] * len(true_true))
y_TT_TF.extend([0.0] * len(true_false))
y_TT_TF = np.asarray(y_TT_TF)
y_TT_TF = y_TT_TF.T
X_TF_FF = np.concatenate((X_true_false, X_false_false), axis = 0)
y_TF_FF = []
y_TF_FF.extend([1.0] * len(true_false))
y_TF_FF.extend([0.0] * len(false_false))
y_TF_FF = np.asarray(y_TF_FF)
y_TF_FF = y_TF_FF.T
print 'Computing features done!'
from sklearn import linear_model
log_reg = linear_model.LogisticRegression()
log_reg.fit(X_TT_TF, y_TT_TF)
model_TT_TF = []
model_TT_TF.extend(log_reg.intercept_.tolist())
model_TT_TF.extend(log_reg.coef_[0].tolist())  # coef_ is 2-D (1 x n_features)
log_reg = linear_model.LogisticRegression()
log_reg.fit(X_TF_FF, y_TF_FF)
model_TF_FF = []
model_TF_FF.extend(log_reg.intercept_.tolist())
model_TF_FF.extend(log_reg.coef_[0].tolist())  # coef_ is 2-D (1 x n_features)
f = open(output_file, 'w')
f.write('# Classifier I (TT-TF) coefficients\n')
for i in range(len(model_TT_TF)):
f.write('CI%02d\t' % i)  # label each coefficient with its index
f.write('%.60f\n' % model_TT_TF[i])
f.write('# Classifier II (TF-FF) coefficients\n')
for i in range(len(model_TF_FF)):
f.write('CII%02d\t' % i)
f.write('%.60f\n' % model_TF_FF[i])
f.write('nTT\t%d\n' % len(true_true))
f.write('nTF\t%d\n' % len(true_false))
f.write('nFF\t%d\n' % len(false_false))
f.close()
print 'XLSearch train mode finished!'
| mit |
xin3liang/platform_external_chromium_org | tools/perf/page_sets/tough_texture_upload_cases.py | 34 | 1457 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class ToughTextureUploadCasesPage(page_module.Page):
def __init__(self, url, page_set):
super(
ToughTextureUploadCasesPage,
self).__init__(
url=url,
page_set=page_set)
def RunSmoothness(self, action_runner):
interaction = action_runner.BeginGestureInteraction(
'ScrollAction', is_smooth=True)
action_runner.ScrollPage()
interaction.End()
class ToughTextureUploadCasesPageSet(page_set_module.PageSet):
"""
Description: A collection of texture upload performance tests
"""
def __init__(self):
super(ToughTextureUploadCasesPageSet, self).__init__()
urls_list = [
'file://tough_texture_upload_cases/background_color_animation.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_and_transform_animation.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_with_gradient.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_with_gradient_and_transform_animation.html']
for url in urls_list:
self.AddPage(ToughTextureUploadCasesPage(url, self))
| bsd-3-clause |
darvelo/chime | fabfile/fabconf.py | 2 | 2774 | # -*- coding: utf-8 -*-
'''
--------------------------------------------------------------------------------------
project_conf.py
--------------------------------------------------------------------------------------
Configuration settings that detail EC2 instances. Note that we are not using
the built-in env from fabric.api -- there are no official recommendations on
best practice. See: http://lists.gnu.org/archive/html/fab-user/2013-11/msg00006.html
'''
import os
import os.path
import pwd
fabconf = {}
# Do not edit
fabconf['FAB_CONFIG_PATH'] = os.path.dirname(__file__)
fabconf['FAB_HOSTS_FILE'] = fabconf.get('FAB_CONFIG_PATH') + '/hosts.txt'
# Project name
fabconf['PROJECT_NAME'] = os.environ.get('PROJECT_NAME', 'chime')
fabconf['GIT_BRANCH'] = 'master'
# Username for connecting to EC2 instaces - Do not edit unless you have a reason to
fabconf['SERVER_USERNAME'] = 'ubuntu'
# Don't edit. Full path of the ssh key you use to connect to EC2 instances
fabconf['SSH_PRIVATE_KEY_PATH'] = os.environ.get('SSH_PRIVATE_KEY_PATH')
# Where to install apps
fabconf['APPS_DIR'] = "/home/{user}/web".format(user=fabconf.get('SERVER_USERNAME'))
# Where your project will installed: /<APPS_DIR>/<PROJECT_NAME>
fabconf['PROJECT_PATH'] = '{apps}/{project}'.format(
apps=fabconf.get('APPS_DIR'),
project=fabconf.get('PROJECT_NAME')
)
# Space-delimited list of app domains
fabconf['DOMAINS'] = os.environ.get('DOMAINS')
# Name tag for your server instance on EC2
# Use recommendation from https://docs.python.org/2/library/os.html#os.getlogin
# to get around ioctl error thrown by os.getlogin() in a cron job.
fabconf['INSTANCE_NAME_TAG'] = os.environ.get('INSTANCE_NAME_TAG', 'ChimeCMS Autotest')
fabconf['INSTANCE_CREATED_BY'] = '{}-{}'.format(pwd.getpwuid(os.getuid())[0], os.uname()[1])
# EC2 key.
fabconf['AWS_ACCESS_KEY'] = os.environ['AWS_ACCESS_KEY']
# EC2 secret.
fabconf['AWS_SECRET_KEY'] = os.environ['AWS_SECRET_KEY']
#EC2 region. Defaults to us-east-1
fabconf['EC2_REGION'] = os.environ.get('EC2_REGION', 'us-east-1')
# AMI IDs, passed in as a comma-delimited list.
# Defaults to Ubuntu 14.04
fabconf['EC2_AMIS'] = os.environ.get('EC2_AMIS', 'ami-6725ea0c').split(',')
# Name of the keypair you use in EC2.
fabconf['EC2_KEY_PAIR'] = os.environ.get('EC2_KEY_PAIR', 'cfa-chime-keypair')
# Name of the security group.
fabconf['AWS_SECURITY_GROUPS'] = os.environ.get('AWS_SECURITY_GROUPS', 'default')
# API Name of instance type. Defaults to t2.micro
fabconf['EC2_INSTANCE_TYPE'] = os.environ.get('EC2_INSTANCE_TYPE', 't2.small')
# Assorted other config (described in AcceptanceTests.md) used here to fail fast
fabconf['TESTING_EMAIL'] = os.environ['TESTING_EMAIL']
fabconf['TESTING_PASSWORD'] = os.environ['TESTING_PASSWORD']
| bsd-3-clause |
heia-fr/sirano | sirano/plugins/actions/raw_payload.py | 1 | 1377 | # -*- coding: utf-8 -*-
#
# This file is a part of Sirano.
#
# Copyright (C) 2015 HES-SO // HEIA-FR
# Copyright (C) 2015 Loic Gremaud <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from sirano.action import Action
class RTPPayloadAction(Action):
"""Anonymize the RTP payload content field"""
name = "raw-payload"
def __init__(self, app):
super(RTPPayloadAction, self).__init__(app)
def anonymize(self, value):
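# Build a replacement string of exactly the original payload length by
# cycling through the marker text, so packet sizes and offsets are preserved.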
value_len = len(value)
text = "ANONYMIZED BY SIRANO "
text_len = len(text)
s = ''
for i in range(value_len):
s += text[i % text_len]
return s
def discover(self, value):
pass
| gpl-2.0 |
jdilallo/jdilallo-test | examples/dfp/v201311/custom_targeting_service/get_custom_targeting_values_by_statement.py | 1 | 2247 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets custom targeting values for the given predefined custom
targeting key.
To create custom targeting values, run
create_custom_targeting_keys_and_values.py. To determine which custom
targeting keys exist, run get_all_custom_targeting_keys_and_values.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate classes from the client library.
from googleads import dfp
CUSTOM_TARGETING_KEY_ID = 'INSERT_CUSTOM_TARGETING_KEY_ID_HERE'
def main(client, key_id):
# Initialize appropriate service.
custom_targeting_service = client.GetService(
'CustomTargetingService', version='v201311')
values = [{
'key': 'keyId',
'value': {
'xsi_type': 'NumberValue',
'value': key_id
}
}]
query = 'WHERE customTargetingKeyId = :keyId'
statement = dfp.FilterStatement(query, values)
# Get custom targeting values by statement.
while True:
response = custom_targeting_service.getCustomTargetingValuesByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for value in response['results']:
print ('Custom targeting value with id \'%s\', name \'%s\', and display'
' name \'%s\' was found.'
% (value['id'], value['name'], value['displayName']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, CUSTOM_TARGETING_KEY_ID)
| apache-2.0 |
KousikaGanesh/purchaseandInventory | openerp/addons/crm/wizard/crm_phonecall_to_meeting.py | 56 | 2730 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class crm_phonecall2meeting(osv.osv_memory):
""" Phonecall to Meeting """
_name = 'crm.phonecall2meeting'
_description = 'Phonecall To Meeting'
def action_cancel(self, cr, uid, ids, context=None):
"""
Closes Phonecall to Meeting form
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Phonecall to Meeting IDs
@param context: A standard dictionary for contextual values
"""
return {'type':'ir.actions.act_window_close'}
def action_make_meeting(self, cr, uid, ids, context=None):
""" This opens Meeting's calendar view to schedule meeting on current Phonecall
@return : Dictionary value for created Meeting view
"""
res = {}
phonecall_id = context and context.get('active_id', False) or False
if phonecall_id:
phonecall = self.pool.get('crm.phonecall').browse(cr, uid, phonecall_id, context)
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'base_calendar', 'action_crm_meeting', context)
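# The default_* keys in the action context prefill the matching fields on
# the meeting form that this action opens.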
res['context'] = {
'default_phonecall_id': phonecall.id,
'default_partner_id': phonecall.partner_id and phonecall.partner_id.id or False,
'default_user_id': uid,
'default_email_from': phonecall.email_from,
'default_state': 'open',
'default_name': phonecall.name,
}
return res
crm_phonecall2meeting()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
gengliangwang/spark | python/pyspark/serializers.py | 10 | 20586 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
PySpark supports custom serializers for transferring data; this can improve
performance.
By default, PySpark uses :class:`PickleSerializer` to serialize objects using Python's
`cPickle` serializer, which can serialize nearly any Python object.
Other serializers, like :class:`MarshalSerializer`, support fewer datatypes but can be
faster.
Examples
--------
The serializer is chosen when creating :class:`SparkContext`:
>>> from pyspark.context import SparkContext
>>> from pyspark.serializers import MarshalSerializer
>>> sc = SparkContext('local', 'test', serializer=MarshalSerializer())
>>> sc.parallelize(list(range(1000))).map(lambda x: 2 * x).take(10)
[0, 2, 4, 6, 8, 10, 12, 14, 16, 18]
>>> sc.stop()
PySpark serializes objects in batches; by default, the batch size is chosen based
on the size of objects and is also configurable by SparkContext's `batchSize`
parameter:
>>> sc = SparkContext('local', 'test', batchSize=2)
>>> rdd = sc.parallelize(range(16), 4).map(lambda x: x)
Behind the scenes, this creates a JavaRDD with four partitions, each of
which contains two batches of two objects:
>>> rdd.glom().collect()
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]
>>> int(rdd._jrdd.count())
8
>>> sc.stop()
"""
import sys
from itertools import chain, product
import marshal
import struct
import types
import collections
import zlib
import itertools
import pickle
pickle_protocol = pickle.HIGHEST_PROTOCOL
from pyspark import cloudpickle
from pyspark.util import print_exec
__all__ = ["PickleSerializer", "MarshalSerializer", "UTF8Deserializer"]
class SpecialLengths(object):
END_OF_DATA_SECTION = -1
PYTHON_EXCEPTION_THROWN = -2
TIMING_DATA = -3
END_OF_STREAM = -4
NULL = -5
START_ARROW_STREAM = -6
class Serializer(object):
def dump_stream(self, iterator, stream):
"""
Serialize an iterator of objects to the output stream.
"""
raise NotImplementedError
def load_stream(self, stream):
"""
Return an iterator of deserialized objects from the input stream.
"""
raise NotImplementedError
def _load_stream_without_unbatching(self, stream):
"""
Return an iterator of deserialized batches (iterable) of objects from the input stream.
If the serializer does not operate on batches the default implementation returns an
iterator of single element lists.
"""
return map(lambda x: [x], self.load_stream(stream))
# Note: our notion of "equality" is that output generated by
# equal serializers can be deserialized using the same serializer.
# This default implementation handles the simple cases;
# subclasses should override __eq__ as appropriate.
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
def __hash__(self):
return hash(str(self))
class FramedSerializer(Serializer):
"""
Serializer that writes objects as a stream of (length, data) pairs,
where `length` is a 32-bit integer and data is `length` bytes.
"""
def dump_stream(self, iterator, stream):
for obj in iterator:
self._write_with_length(obj, stream)
def load_stream(self, stream):
while True:
try:
yield self._read_with_length(stream)
except EOFError:
return
def _write_with_length(self, obj, stream):
serialized = self.dumps(obj)
if serialized is None:
raise ValueError("serialized value should not be None")
if len(serialized) > (1 << 31):
raise ValueError("can not serialize object larger than 2G")
write_int(len(serialized), stream)
stream.write(serialized)
def _read_with_length(self, stream):
length = read_int(stream)
if length == SpecialLengths.END_OF_DATA_SECTION:
raise EOFError
elif length == SpecialLengths.NULL:
return None
obj = stream.read(length)
if len(obj) < length:
raise EOFError
return self.loads(obj)
def dumps(self, obj):
"""
Serialize an object into a byte array.
When batching is used, this will be called with an array of objects.
"""
raise NotImplementedError
def loads(self, obj):
"""
Deserialize an object from a byte array.
"""
raise NotImplementedError
class BatchedSerializer(Serializer):
"""
Serializes a stream of objects in batches by calling its wrapped
Serializer with streams of objects.
"""
UNLIMITED_BATCH_SIZE = -1
UNKNOWN_BATCH_SIZE = 0
def __init__(self, serializer, batchSize=UNLIMITED_BATCH_SIZE):
self.serializer = serializer
self.batchSize = batchSize
def _batched(self, iterator):
if self.batchSize == self.UNLIMITED_BATCH_SIZE:
yield list(iterator)
elif hasattr(iterator, "__len__") and hasattr(iterator, "__getslice__"):
n = len(iterator)
for i in range(0, n, self.batchSize):
yield iterator[i: i + self.batchSize]
else:
items = []
count = 0
for item in iterator:
items.append(item)
count += 1
if count == self.batchSize:
yield items
items = []
count = 0
if items:
yield items
def dump_stream(self, iterator, stream):
self.serializer.dump_stream(self._batched(iterator), stream)
def load_stream(self, stream):
return chain.from_iterable(self._load_stream_without_unbatching(stream))
def _load_stream_without_unbatching(self, stream):
return self.serializer.load_stream(stream)
def __repr__(self):
return "BatchedSerializer(%s, %d)" % (str(self.serializer), self.batchSize)
class FlattenedValuesSerializer(BatchedSerializer):
"""
Serializes a stream of list of pairs, split the list of values
which contain more than a certain number of objects to make them
have similar sizes.
"""
def __init__(self, serializer, batchSize=10):
BatchedSerializer.__init__(self, serializer, batchSize)
def _batched(self, iterator):
n = self.batchSize
for key, values in iterator:
for i in range(0, len(values), n):
yield key, values[i:i + n]
def load_stream(self, stream):
return self.serializer.load_stream(stream)
def __repr__(self):
return "FlattenedValuesSerializer(%s, %d)" % (self.serializer, self.batchSize)
class AutoBatchedSerializer(BatchedSerializer):
"""
Choose the size of batch automatically based on the size of object
"""
def __init__(self, serializer, bestSize=1 << 16):
BatchedSerializer.__init__(self, serializer, self.UNKNOWN_BATCH_SIZE)
self.bestSize = bestSize
def dump_stream(self, iterator, stream):
batch, best = 1, self.bestSize
iterator = iter(iterator)
while True:
vs = list(itertools.islice(iterator, batch))
if not vs:
break
bytes = self.serializer.dumps(vs)
write_int(len(bytes), stream)
stream.write(bytes)
size = len(bytes)
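# Adapt the batch size toward bestSize: grow it while serialized batches
# stay under the target, shrink it when they overshoot by more than 10x.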
if size < best:
batch *= 2
elif size > best * 10 and batch > 1:
batch //= 2
def __repr__(self):
return "AutoBatchedSerializer(%s)" % self.serializer
class CartesianDeserializer(Serializer):
"""
Deserializes the JavaRDD cartesian() of two PythonRDDs.
Due to pyspark batching we cannot simply use the result of the Java RDD cartesian,
we additionally need to do the cartesian within each pair of batches.
"""
def __init__(self, key_ser, val_ser):
self.key_ser = key_ser
self.val_ser = val_ser
def _load_stream_without_unbatching(self, stream):
key_batch_stream = self.key_ser._load_stream_without_unbatching(stream)
val_batch_stream = self.val_ser._load_stream_without_unbatching(stream)
for (key_batch, val_batch) in zip(key_batch_stream, val_batch_stream):
# for correctness with repeated cartesian/zip this must be returned as one batch
yield product(key_batch, val_batch)
def load_stream(self, stream):
return chain.from_iterable(self._load_stream_without_unbatching(stream))
def __repr__(self):
return "CartesianDeserializer(%s, %s)" % \
(str(self.key_ser), str(self.val_ser))
class PairDeserializer(Serializer):
"""
Deserializes the JavaRDD zip() of two PythonRDDs.
Due to pyspark batching we cannot simply use the result of the Java RDD zip,
we additionally need to do the zip within each pair of batches.
"""
def __init__(self, key_ser, val_ser):
self.key_ser = key_ser
self.val_ser = val_ser
def _load_stream_without_unbatching(self, stream):
key_batch_stream = self.key_ser._load_stream_without_unbatching(stream)
val_batch_stream = self.val_ser._load_stream_without_unbatching(stream)
for (key_batch, val_batch) in zip(key_batch_stream, val_batch_stream):
# For double-zipped RDDs, the batches can be iterators from other PairDeserializer,
# instead of lists. We need to convert them to lists if needed.
key_batch = key_batch if hasattr(key_batch, '__len__') else list(key_batch)
val_batch = val_batch if hasattr(val_batch, '__len__') else list(val_batch)
if len(key_batch) != len(val_batch):
raise ValueError("Can not deserialize PairRDD with different number of items"
" in batches: (%d, %d)" % (len(key_batch), len(val_batch)))
# for correctness with repeated cartesian/zip this must be returned as one batch
yield zip(key_batch, val_batch)
def load_stream(self, stream):
return chain.from_iterable(self._load_stream_without_unbatching(stream))
def __repr__(self):
return "PairDeserializer(%s, %s)" % (str(self.key_ser), str(self.val_ser))
class NoOpSerializer(FramedSerializer):
def loads(self, obj):
return obj
def dumps(self, obj):
return obj
# Hack namedtuple, make it picklable
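# Background: pickle normally serializes a namedtuple instance by importing
# its class from a module, which fails for classes created dynamically on a
# different process; _restore() below rebuilds the class from (name, fields)
# at load time instead.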
__cls = {} # type: ignore
def _restore(name, fields, value):
""" Restore an object of namedtuple"""
k = (name, fields)
cls = __cls.get(k)
if cls is None:
cls = collections.namedtuple(name, fields)
__cls[k] = cls
return cls(*value)
def _hack_namedtuple(cls):
""" Make class generated by namedtuple picklable """
name = cls.__name__
fields = cls._fields
def __reduce__(self):
return (_restore, (name, fields, tuple(self)))
cls.__reduce__ = __reduce__
cls._is_namedtuple_ = True
return cls
def _hijack_namedtuple():
""" Hack namedtuple() to make it picklable """
# hijack only one time
if hasattr(collections.namedtuple, "__hijack"):
return
global _old_namedtuple  # declared global so it is not captured in a closure
global _old_namedtuple_kwdefaults  # declared global for the same reason
def _copy_func(f):
return types.FunctionType(f.__code__, f.__globals__, f.__name__,
f.__defaults__, f.__closure__)
_old_namedtuple = _copy_func(collections.namedtuple)
_old_namedtuple_kwdefaults = collections.namedtuple.__kwdefaults__
def namedtuple(*args, **kwargs):
for k, v in _old_namedtuple_kwdefaults.items():
kwargs[k] = kwargs.get(k, v)
cls = _old_namedtuple(*args, **kwargs)
return _hack_namedtuple(cls)
# replace namedtuple with the new one
collections.namedtuple.__globals__["_old_namedtuple_kwdefaults"] = _old_namedtuple_kwdefaults
collections.namedtuple.__globals__["_old_namedtuple"] = _old_namedtuple
collections.namedtuple.__globals__["_hack_namedtuple"] = _hack_namedtuple
collections.namedtuple.__code__ = namedtuple.__code__
collections.namedtuple.__hijack = 1
# hack the cls already generated by namedtuple.
# Those created in other modules can be pickled as normal,
# so only hack those in __main__ module
for n, o in sys.modules["__main__"].__dict__.items():
if (type(o) is type and o.__base__ is tuple
and hasattr(o, "_fields")
and "__reduce__" not in o.__dict__):
_hack_namedtuple(o) # hack inplace
_hijack_namedtuple()
class PickleSerializer(FramedSerializer):
"""
Serializes objects using Python's pickle serializer:
http://docs.python.org/2/library/pickle.html
This serializer supports nearly any Python object, but may
not be as fast as more specialized serializers.
"""
def dumps(self, obj):
return pickle.dumps(obj, pickle_protocol)
def loads(self, obj, encoding="bytes"):
return pickle.loads(obj, encoding=encoding)
class CloudPickleSerializer(PickleSerializer):
def dumps(self, obj):
try:
return cloudpickle.dumps(obj, pickle_protocol)
except pickle.PickleError:
raise
except Exception as e:
emsg = str(e)
if "'i' format requires" in emsg:
msg = "Object too large to serialize: %s" % emsg
else:
msg = "Could not serialize object: %s: %s" % (e.__class__.__name__, emsg)
print_exec(sys.stderr)
raise pickle.PicklingError(msg)
class MarshalSerializer(FramedSerializer):
"""
Serializes objects using Python's Marshal serializer:
http://docs.python.org/2/library/marshal.html
This serializer is faster than PickleSerializer but supports fewer datatypes.
"""
def dumps(self, obj):
return marshal.dumps(obj)
def loads(self, obj):
return marshal.loads(obj)
class AutoSerializer(FramedSerializer):
"""
Choose marshal or pickle as serialization protocol automatically
"""
def __init__(self):
FramedSerializer.__init__(self)
self._type = None
def dumps(self, obj):
if self._type is not None:
return b'P' + pickle.dumps(obj, -1)
try:
return b'M' + marshal.dumps(obj)
except Exception:
self._type = b'P'
return b'P' + pickle.dumps(obj, -1)
def loads(self, obj):
_type = obj[0:1]  # slice (not index) so the tag stays bytes under Python 3
if _type == b'M':
return marshal.loads(obj[1:])
elif _type == b'P':
return pickle.loads(obj[1:])
else:
raise ValueError("invalid serialization type: %s" % _type)
class CompressedSerializer(FramedSerializer):
"""
Compress the serialized data
"""
def __init__(self, serializer):
FramedSerializer.__init__(self)
assert isinstance(serializer, FramedSerializer), "serializer must be a FramedSerializer"
self.serializer = serializer
def dumps(self, obj):
return zlib.compress(self.serializer.dumps(obj), 1)
def loads(self, obj):
return self.serializer.loads(zlib.decompress(obj))
def __repr__(self):
return "CompressedSerializer(%s)" % self.serializer
class UTF8Deserializer(Serializer):
"""
Deserializes streams written by String.getBytes.
"""
def __init__(self, use_unicode=True):
self.use_unicode = use_unicode
def loads(self, stream):
length = read_int(stream)
if length == SpecialLengths.END_OF_DATA_SECTION:
raise EOFError
elif length == SpecialLengths.NULL:
return None
s = stream.read(length)
return s.decode("utf-8") if self.use_unicode else s
def load_stream(self, stream):
try:
while True:
yield self.loads(stream)
except struct.error:
return
except EOFError:
return
def __repr__(self):
return "UTF8Deserializer(%s)" % self.use_unicode
def read_long(stream):
length = stream.read(8)
if not length:
raise EOFError
return struct.unpack("!q", length)[0]
def write_long(value, stream):
stream.write(struct.pack("!q", value))
def pack_long(value):
return struct.pack("!q", value)
def read_int(stream):
length = stream.read(4)
if not length:
raise EOFError
return struct.unpack("!i", length)[0]
def write_int(value, stream):
stream.write(struct.pack("!i", value))
def read_bool(stream):
length = stream.read(1)
if not length:
raise EOFError
return struct.unpack("!?", length)[0]
def write_with_length(obj, stream):
write_int(len(obj), stream)
stream.write(obj)
class ChunkedStream(object):
"""
This is a file-like object that takes a stream of data of unknown length and breaks it into
fixed-length frames. The intended use case is serializing large data and sending it immediately over
a socket -- we do not want to buffer the entire data before sending it, but the receiving end
needs to know whether or not there is more data coming.
It works by buffering the incoming data in some fixed-size chunks. If the buffer is full, it
first sends the buffer size, then the data. This repeats as long as there is more data to send.
When this is closed, it sends the length of whatever data is in the buffer, then that data, and
finally a "length" of -1 to indicate the stream has completed.
"""
def __init__(self, wrapped, buffer_size):
self.buffer_size = buffer_size
self.buffer = bytearray(buffer_size)
self.current_pos = 0
self.wrapped = wrapped
def write(self, bytes):
byte_pos = 0
byte_remaining = len(bytes)
while byte_remaining > 0:
new_pos = byte_remaining + self.current_pos
if new_pos < self.buffer_size:
# just put it in our buffer
self.buffer[self.current_pos:new_pos] = bytes[byte_pos:]
self.current_pos = new_pos
byte_remaining = 0
else:
# fill the buffer, send the length then the contents, and start filling again
space_left = self.buffer_size - self.current_pos
new_byte_pos = byte_pos + space_left
self.buffer[self.current_pos:self.buffer_size] = bytes[byte_pos:new_byte_pos]
write_int(self.buffer_size, self.wrapped)
self.wrapped.write(self.buffer)
byte_remaining -= space_left
byte_pos = new_byte_pos
self.current_pos = 0
def close(self):
# if there is anything left in the buffer, write it out first
if self.current_pos > 0:
write_int(self.current_pos, self.wrapped)
self.wrapped.write(self.buffer[:self.current_pos])
# -1 length indicates to the receiving end that we're done.
write_int(-1, self.wrapped)
self.wrapped.close()
@property
def closed(self):
"""
Return True if the `wrapped` object has been closed.
NOTE: this property is required by pyarrow to be used as a file-like object in
pyarrow.RecordBatchStreamWriter from ArrowStreamSerializer
"""
return self.wrapped.closed
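# A hedged sketch (editor addition) of the frame layout described in the
# ChunkedStream docstring: [int length][payload] repeated, then -1. The
# BytesIO subclass only exists so the buffer stays readable after close().
def _chunked_stream_example():
    import io

    class _Sink(io.BytesIO):
        def close(self):  # keep contents inspectable after ChunkedStream.close()
            pass

    sink = _Sink()
    out = ChunkedStream(sink, buffer_size=4)
    out.write(b"abcdefg")   # fills one 4-byte frame, buffers b"efg"
    out.close()             # flushes b"efg", then writes the -1 terminator
    data = io.BytesIO(sink.getvalue())
    assert data.read(4) == b'\x00\x00\x00\x04' and data.read(4) == b"abcd"
    assert data.read(4) == b'\x00\x00\x00\x03' and data.read(3) == b"efg"
    assert data.read(4) == b'\xff\xff\xff\xff'  # struct.pack("!i", -1)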
if __name__ == '__main__':
import doctest
(failure_count, test_count) = doctest.testmod()
if failure_count:
sys.exit(-1)
| apache-2.0 |
hujiajie/pa-chromium | chrome/test/functional/search_engines.py | 79 | 3858 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import pyauto_functional # Must be imported before pyauto
import pyauto
import test_utils
class SearchEnginesTest(pyauto.PyUITest):
"""TestCase for Search Engines."""
_localhost_prefix = 'http://localhost:1000/'
def _GetSearchEngineWithKeyword(self, keyword):
"""Get search engine info and return an element that matches keyword.
Args:
keyword: Search engine keyword field.
Returns:
A search engine info dict or None.
"""
    match_list = [x for x in self.GetSearchEngineInfo()
                  if x['keyword'] == keyword]
if match_list:
return match_list[0]
return None
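  # Hedged illustration (editor addition): the tests below rely on these
  # fields of each dict returned by pyauto's GetSearchEngineInfo(); the field
  # names come from this file, the values are hypothetical:
  #
  #   {'keyword': 'example.com',
  #    'short_name': 'Example',
  #    'in_default_list': False,
  #    'is_default': False}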
def Debug(self):
"""Test method for experimentation.
This method will not run automatically.
"""
while True:
raw_input('Interact with the browser and hit <enter>')
self.pprint(self.GetSearchEngineInfo())
def testDiscoverSearchEngine(self):
"""Test that chrome discovers youtube search engine after searching."""
# Take a snapshot of current search engine info.
info = self.GetSearchEngineInfo()
youtube = self._GetSearchEngineWithKeyword('youtube.com')
self.assertFalse(youtube)
# Use omnibox to invoke search engine discovery.
# Navigating using NavigateToURL does not currently invoke this logic.
self.SetOmniboxText('http://www.youtube.com')
self.OmniboxAcceptInput()
    def InfoUpdated(old_info):
      new_info = self.GetSearchEngineInfo()
      return len(new_info) > len(old_info)
self.WaitUntil(lambda: InfoUpdated(info))
youtube = self._GetSearchEngineWithKeyword('youtube.com')
self.assertTrue(youtube)
self.assertTrue(re.search('youtube', youtube['short_name'],
re.IGNORECASE))
self.assertFalse(youtube['in_default_list'])
self.assertFalse(youtube['is_default'])
def testDeleteSearchEngine(self):
"""Test adding then deleting a search engine."""
self.AddSearchEngine(title='foo',
keyword='foo.com',
url='http://foo/?q=%s')
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertTrue(foo)
self.DeleteSearchEngine('foo.com')
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertFalse(foo)
def testMakeSearchEngineDefault(self):
"""Test adding then making a search engine default."""
self.AddSearchEngine(
title='foo',
keyword='foo.com',
url=self._localhost_prefix + '?q=%s')
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertTrue(foo)
self.assertFalse(foo['is_default'])
self.MakeSearchEngineDefault('foo.com')
foo = self._GetSearchEngineWithKeyword('foo.com')
self.assertTrue(foo)
self.assertTrue(foo['is_default'])
self.SetOmniboxText('foobar')
self.OmniboxAcceptInput()
self.assertEqual(self._localhost_prefix + '?q=foobar',
self.GetActiveTabURL().spec())
def testDefaultSearchEngines(self):
"""Test that we have 3 default search options."""
info = self.GetSearchEngineInfo()
self.assertEqual(len(info), 3)
# Verify that each can be used as the default search provider.
default_providers = ['google.com', 'yahoo.com', 'bing.com']
for keyword in default_providers:
self.MakeSearchEngineDefault(keyword)
search_engine = self._GetSearchEngineWithKeyword(keyword)
self.assertTrue(search_engine['is_default'])
self.SetOmniboxText('test search')
self.OmniboxAcceptInput()
self.assertTrue(re.search(keyword, self.GetActiveTabURL().spec()))
if __name__ == '__main__':
pyauto_functional.Main()
| bsd-3-clause |