🐙OctoPack
Collection
13 items
•
Updated
•
4
commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
a492e805fa51940d746a1d251232bc4f13417165 | fix waftools/man.py to install manpages again. | theeternalsw0rd/xmms2,dreamerc/xmms2,theeternalsw0rd/xmms2,six600110/xmms2,six600110/xmms2,dreamerc/xmms2,xmms2/xmms2-stable,chrippa/xmms2,oneman/xmms2-oneman,krad-radio/xmms2-krad,theefer/xmms2,theefer/xmms2,krad-radio/xmms2-krad,oneman/xmms2-oneman,theefer/xmms2,oneman/xmms2-oneman,six600110/xmms2,oneman/xmms2-oneman,chrippa/xmms2,theefer/xmms2,oneman/xmms2-oneman,oneman/xmms2-oneman,oneman/xmms2-oneman-old,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36,krad-radio/xmms2-krad,oneman/xmms2-oneman-old,six600110/xmms2,theeternalsw0rd/xmms2,six600110/xmms2,chrippa/xmms2,krad-radio/xmms2-krad,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,dreamerc/xmms2,dreamerc/xmms2,oneman/xmms2-oneman-old,mantaraya36/xmms2-mantaraya36,chrippa/xmms2,dreamerc/xmms2,xmms2/xmms2-stable,chrippa/xmms2,theefer/xmms2,xmms2/xmms2-stable,chrippa/xmms2,krad-radio/xmms2-krad,oneman/xmms2-oneman,oneman/xmms2-oneman-old,theeternalsw0rd/xmms2,theefer/xmms2,xmms2/xmms2-stable,krad-radio/xmms2-krad,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36,six600110/xmms2,oneman/xmms2-oneman-old,mantaraya36/xmms2-mantaraya36,theefer/xmms2,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable | waftools/man.py | waftools/man.py | import Common, Object, Utils, Node, Params
import sys, os
import gzip
from misc import copyobj
def gzip_func(task):
env = task.m_env
infile = task.m_inputs[0].abspath(env)
outfile = task.m_outputs[0].abspath(env)
input = open(infile, 'r')
output = gzip.GzipFile(outfile, mode='w')
output.write(input.read())
return 0
class manobj(copyobj):
def __init__(self, section=1, type='none'):
copyobj.__init__(self, type)
self.fun = gzip_func
self.files = []
self.section = section
def apply(self):
lst = self.to_list(self.files)
for file in lst:
node = self.path.find_source(file)
if not node: fatal('cannot find input file %s for processing' % file)
target = self.target
if not target or len(lst)>1: target = node.m_name
newnode = self.path.find_build(file+'.gz') #target?
if not newnode:
newnode = Node.Node(file+'.gz', self.path)
self.path.append_build(newnode)
task = self.create_task('copy', self.env, 8)
task.set_inputs(node)
task.set_outputs(newnode)
task.m_env = self.env
task.fun = self.fun
if Params.g_commands['install'] or Params.g_commands['uninstall']:
Common.install_files('MANDIR', 'man' + str(self.section), newnode.abspath(self.env))
def setup(env):
Object.register('man', manobj)
def detect(conf):
return 1
| import Common, Object, Utils, Node, Params
import sys, os
import gzip
from misc import copyobj
def gzip_func(task):
env = task.m_env
infile = task.m_inputs[0].abspath(env)
outfile = task.m_outputs[0].abspath(env)
input = open(infile, 'r')
output = gzip.GzipFile(outfile, mode='w')
output.write(input.read())
return 0
class manobj(copyobj):
def __init__(self, section=1, type='none'):
copyobj.__init__(self, type)
self.fun = gzip_func
self.files = []
self.section = section
def apply(self):
lst = self.to_list(self.source)
for file in lst:
node = self.path.find_source(file)
if not node: fatal('cannot find input file %s for processing' % file)
target = self.target
if not target or len(lst)>1: target = node.m_name
newnode = self.path.find_build(file+'.gz') #target?
if not newnode:
newnode = Node.Node(file+'.gz', self.path)
self.path.append_build(newnode)
task = self.create_task('copy', self.env, 8)
task.set_inputs(node)
task.set_outputs(newnode)
task.m_env = self.env
task.fun = self.fun
if Params.g_commands['install'] or Params.g_commands['uninstall']:
Common.install_files('MANDIR', 'man' + str(self.section), newnode.abspath(self.env))
def setup(env):
Object.register('man', manobj)
def detect(conf):
return 1
| lgpl-2.1 | Python |
735a52b8ad4ebf7b6b8bb47e14667cd9004e624b | add some mappings | gsathya/dsalgo,gsathya/dsalgo | algo/lru.py | algo/lru.py | mapping = {}
class Node:
def __init__(self, val):
self.next = None
self.prev = None
self.value = val
class DoublyLinkedList:
def __init__(self):
self.head = None
def insert(self, val):
node = Node(val)
mapping[val] = node
head = self.head
if self.head == None:
self.head = node
else:
while head.next != None:
head = head.next
head.next = node
node.prev = head
def print_list(self):
head = self.head
while head != None:
print head.value
head = head.next
if __name__ == '__main__':
dll = DoublyLinkedList()
for i in range(10):
dll.insert(i)
| class Node:
def __init__(self, val):
self.next = None
self.prev = None
self.value = val
class DoublyLinkedList:
def __init__(self):
self.head = None
def insert(self, val):
node = Node(val)
head = self.head
if self.head == None:
self.head = node
else:
while head.next != None:
head = head.next
head.next = node
node.prev = head
def print_list(self):
head = self.head
while head != None:
print head.value
head = head.next
if __name__ == '__main__':
dll = DoublyLinkedList()
for i in range(10):
dll.insert(i)
| mit | Python |
6bb58e13b657c1546f4f5d1afa70d48a9187f168 | Update server.py | volodink/itstime4science,volodink/itstime4science,volodink/itstime4science | gprs/server.py | gprs/server.py | from socket import *
from modules import decode_packet
import sys
from modules import params
Parser = params.Parser()
argv = Parser.createParser()
ip_and_port = argv.parse_args(sys.argv[1:])
#host = ip_and_port.ip
#port = int(ip_and_port.port)
host = "0.0.0.0"
port = 5100
addr = (host, port)
print(host,port)
tcp_socket = socket(AF_INET, SOCK_STREAM)
tcp_socket.bind(addr)
tcp_socket.listen(10)
loop = True
while loop:
data = None
print('wait connection...')
conn, addr = tcp_socket.accept()
while loop:
f = open('logs/gprs.log', 'a+')
data = conn.recv(109)
decode_packet.insert(data)
print(data)
if data:
f.write(str(data))
f.close()
else:
f.close()
break
conn.close()
tcp_socket.close()
| from socket import *
from modules import decode_packet
import sys
from modules import params
Parser = params.Parser()
argv = Parser.createParser()
ip_and_port = argv.parse_args(sys.argv[1:])
#host = ip_and_port.ip
#port = int(ip_and_port.port)
host = "0.0.0.0"
port = 5300
addr = (host, port)
print(host,port)
tcp_socket = socket(AF_INET, SOCK_STREAM)
tcp_socket.bind(addr)
tcp_socket.listen(10)
loop = True
while loop:
data = None
print('wait connection...')
conn, addr = tcp_socket.accept()
while loop:
f = open('logs/gprs.log', 'a+')
data = conn.recv(109)
decode_packet.insert(data)
print(data)
if data:
f.write(str(data))
f.close()
else:
f.close()
break
conn.close()
tcp_socket.close()
| mit | Python |
85775847e93b35ac19e09962bc2b10f9be666e33 | Update analysis.py with new finallist.py method | lukasschwab/MathIA | analysis.py | analysis.py | import random
import linecache
from unidecode import unidecode
# Process links into list
finallist = [None] * 5716809
with open('links-simple-sorted.txt', 'r') as src:
for line in src:
[oNode, dNode] = line.split(':')
finallist[int(oNode)] = dNode.rstrip('\n')[1:]
# ACTUALLY: pick a random line in links-sorted, and translate the numbers from there
# Get a random node, and pull that line from the links doc; want this to be an option
oNode = random.randint(1,5706070)
dNode = finallist[oNode]
dNode = dNode.split(' ')
# Translate these into titles and print the result
oname = linecache.getline('titles-sorted.txt',int(oNode))
oname = oname[:-1] # Gets rid of the trailing newline
print '\nORIGIN NODE: ' + oname + '\n'
print 'DESTINATION NODES:'
for thisnum in dNode:
dname = linecache.getline('titles-sorted.txt',int(thisnum))[:-1]
print ' ' + dname
print '\n' | import random
import linecache
from unidecode import unidecode
# ACTUALLY: pick a random line in links-sorted, and translate the numbers from there
# Get a random node, and pull that line from the links doc––want this to be an option
# Pull from links because some titles don't have link lines
lineno = random.randint(1,5706070)
linestr = linecache.getline('links-simple-sorted.txt',lineno)
# Process the string to split the "from" and "to" numbers
[origin, dest] = linestr.split(':')
dest = dest[1:-1] # Gets rid of the first space and trailing newline
dest = dest.split(' ') # Split at spaces
# Translate these into title
oname = lincache.getline('titles-sorted.txt',int(origin))
oname = oname[:-1] # Gets rid of the trailing newline
UNIoname = unidecode(u oname)
for thisnum in dest:
dname = linecache.getline('titles-sorted.txt',int(thisnum))[:-1]
UNIdname = unidecode(linecache.getline('titles-sorted.txt', int(thisnum))[:-1])
# Get some stats bro
linksout = len(dest)
# To get linksin need an adjacency matrix
def assemblematrix():
# Something with links-simple-sorted.txt
# Parse that shit in
def linksin(node):
# Locations of value "1" in the row int(node)
def linksout(node):
# Locations of value "1" in the col int(node)
| mit | Python |
6a3f0ade1d8fe16eeda6d339220b7ef877b402e5 | Add no-break options | KaiyiZhang/Secipt,KaiyiZhang/Secipt,KaiyiZhang/Secipt | LFI.TESTER.py | LFI.TESTER.py | '''
@KaiyiZhang Github
'''
import sys
import urllib2
import getopt
import time
target = ''
depth = 6
file = 'etc/passwd'
html = ''
prefix = ''
url = ''
keyword = 'root'
force = False
def usage():
print "LFI.Tester.py Help:"
print "Usage: LFI.TESTER.py -t [-d] [-f] [-k]"
print " -t,--target The test url"
print " -d,--depth The depth for test (Default is 6)"
print " -f,--file The File include (Default is etc/passwd)"
print " -k,--keyword the keyword for vuln check (Default is root)"
try:
if len(sys.argv) < 2:
usage()
sys.exit()
opts,args = getopt.getopt(sys.argv[1:],"ht:d:f:k:n",["help","target=","depth=","file=","keyword=","no-break"])
for opt, arg in opts:
if opt in("-h","--help"):
usage()
sys.exit()
if opt in("-t","--target"):
target = arg
if not target.startswith('http://', 0, 7):
target = 'http://' + target
if opt in("-d","--depth"):
depth = int(arg)
if depth < 1:
usage()
sys.exit()
if opt in("-f","--file"):
file = arg
if file.startswith('/',0,1):
file =file[1:]
if opt in("-k","--keyword"):
keyword = arg
#print keyword
if opt in("-n","--no-break"):
force = True
except getopt.GetoptError:
usage()
sys.exit(2)
for i in range(0,depth):
prefix += '../'
url = target + prefix + file
print "Testing: ",url
try:
response = urllib2.urlopen(url)
#print response.info()
html = response.read()
#print html
except:
pass
if keyword in html:
print url, " is Vulnerable"
if not force:
break
else:
continue
else:
time.sleep(2)
continue
| '''
@KaiyiZhang Github
'''
import sys
import urllib2
import getopt
import time
target = ''
depth = 6
file = 'etc/passwd'
html = ''
prefix = ''
url = ''
keyword='root'
def usage():
print "LFI.Tester.py Help:"
print "Usage: LFI.TESTER.py -t [-d] [-f] [-k]"
print " -t,--target The test url"
print " -d,--depth The depth for test (Default is 6)"
print " -f,--file The File include (Default is etc/passwd)"
print " -k,--keyword the keyword for vuln check (Default is root)"
try:
if len(sys.argv) < 2:
usage()
sys.exit()
opts,args = getopt.getopt(sys.argv[1:],"ht:d:f:k:",["help","target=","depth=","file=","keyword="])
for opt, arg in opts:
if opt in("-h","--help"):
usage()
sys.exit()
if opt in("-t","--target"):
target = arg
if not target.startswith('http://', 0, 7):
target = 'http://' + target
if opt in("-d","--depth"):
depth = int(arg)
if depth < 1:
usage()
sys.exit()
if opt in("-f","--file"):
file = arg
if file.startswith('/',0,1):
file =file[1:]
if opt in("-k","--keyword"):
keyword = arg
#print keyword
except getopt.GetoptError:
usage()
sys.exit(2)
for i in range(0,depth):
prefix += '../'
url = target + prefix + file
print "Testing: ",url
try:
response = urllib2.urlopen(url)
#print response.info()
html = response.read()
#print html
except:
pass
if(keyword in html):
print url, " is Vulnerable"
break
else:
time.sleep(2)
continue
| apache-2.0 | Python |
68c0c054e5b9874f8a6423c35fb83c9de351b9e0 | fix doc build | jbogaardt/chainladder-python,jbogaardt/chainladder-python | examples/plot_benktander.py | examples/plot_benktander.py | """
====================================================================
Benktander: Relationship between Chainladder and BornhuetterFerguson
====================================================================
This example demonstrates the relationship between the Chainladder and
BornhuetterFerguson methods by way fo the Benktander model. Each is a
special case of the Benktander model where ``n_iters = 1`` for BornhuetterFerguson
and as ``n_iters`` approaches infinity yields the chainladder. As ``n_iters``
increases the apriori selection becomes less relevant regardless of initial
choice.
"""
import chainladder as cl
# Load Data
clrd = cl.load_sample('clrd')
medmal_paid = clrd.groupby('LOB').sum().loc['medmal', 'CumPaidLoss']
medmal_prem = clrd.groupby('LOB').sum().loc['medmal', 'EarnedPremDIR'].latest_diagonal
# Generate LDFs and Tail Factor
medmal_paid = cl.Development().fit_transform(medmal_paid)
medmal_paid = cl.TailCurve().fit_transform(medmal_paid)
# Benktander Model
benk = cl.Benktander()
# Prep Benktander Grid Search with various assumptions, and a scoring function
param_grid = dict(n_iters=list(range(1,100,2)),
apriori=[0.50, 0.75, 1.00])
scoring = {'IBNR':lambda x: x.ibnr_.sum()}
grid = cl.GridSearch(benk, param_grid, scoring=scoring)
# Perform Grid Search
grid.fit(medmal_paid, sample_weight=medmal_prem)
# Plot data
grid.results_.pivot(index='n_iters', columns='apriori', values='IBNR').plot(
title='Benktander convergence to Chainladder', grid=True).set(ylabel='IBNR')
| """
====================================================================
Benktander: Relationship between Chainladder and BornhuetterFerguson
====================================================================
This example demonstrates the relationship between the Chainladder and
BornhuetterFerguson methods by way fo the Benktander model. Each is a
special case of the Benktander model where ``n_iters = 1`` for BornhuetterFerguson
and as ``n_iters`` approaches infinity yields the chainladder. As ``n_iters``
increases the apriori selection becomes less relevant regardless of initial
choice.
"""
import chainladder as cl
# Load Data
clrd = cl.load_sample('clrd')
medmal_paid = clrd.groupby('LOB').sum().loc['medmal', 'CumPaidLoss']
medmal_prem = clrd.groupby('LOB').sum().loc['medmal', 'EarnedPremDIR'].latest_diagonal
medmal_prem.rename('development', ['premium'])
# Generate LDFs and Tail Factor
medmal_paid = cl.Development().fit_transform(medmal_paid)
medmal_paid = cl.TailCurve().fit_transform(medmal_paid)
# Benktander Model
benk = cl.Benktander()
# Prep Benktander Grid Search with various assumptions, and a scoring function
param_grid = dict(n_iters=list(range(1,100,2)),
apriori=[0.50, 0.75, 1.00])
scoring = {'IBNR':lambda x: x.ibnr_.sum()}
grid = cl.GridSearch(benk, param_grid, scoring=scoring)
# Perform Grid Search
grid.fit(medmal_paid, sample_weight=medmal_prem)
# Plot data
grid.results_.pivot(index='n_iters', columns='apriori', values='IBNR').plot(
title='Benktander convergence to Chainladder', grid=True).set(ylabel='IBNR')
| mit | Python |
15307ebe2c19c1a3983b0894152ba81fdde34619 | Add comment on dist of first function | charanpald/tyre-hug | exp/descriptivestats.py | exp/descriptivestats.py | import pandas
import numpy
import matplotlib.pyplot as plt
def univariate_stats():
# Generate 1000 random numbers from a normal distribution
num_examples = 1000
z = pandas.Series(numpy.random.randn(num_examples))
# Minimum
print(z.min())
# Maximum
print(z.max())
# Mean
print(z.mean())
# Median
print(z.median())
# Variance
print(z.var())
# Standard deviation
print(z.std())
# Mean absolute deviation
print(z.mad())
# Interquartile range
print(z.quantile(0.75) - z.quantile(0.25))
z.plot(kind="hist")
def multivariate_stats():
num_examples = 1000
x = pandas.Series(numpy.random.randn(num_examples))
y = x + pandas.Series(numpy.random.randn(num_examples))
z = x + pandas.Series(numpy.random.randn(num_examples))
# Covariance
print(y.cov(z))
# Covariance of y with itself is equal to variance
print(y.cov(y), y.var())
# Correlation
print(y.corr(z))
univariate_stats()
multivariate_stats()
plt.show()
| import pandas
import numpy
import matplotlib.pyplot as plt
def univariate_stats():
num_examples = 1000
z = pandas.Series(numpy.random.randn(num_examples))
# Minimum
print(z.min())
# Maximum
print(z.max())
# Mean
print(z.mean())
# Median
print(z.median())
# Variance
print(z.var())
# Standard deviation
print(z.std())
# Mean absolute deviation
print(z.mad())
# Interquartile range
print(z.quantile(0.75) - z.quantile(0.25))
z.plot(kind="hist")
def multivariate_stats():
num_examples = 1000
x = pandas.Series(numpy.random.randn(num_examples))
y = x + pandas.Series(numpy.random.randn(num_examples))
z = x + pandas.Series(numpy.random.randn(num_examples))
# Covariance
print(y.cov(z))
# Covariance of y with itself is equal to variance
print(y.cov(y), y.var())
# Correlation
print(y.corr(z))
univariate_stats()
multivariate_stats()
plt.show()
| mit | Python |
9af7c8bfc22a250ce848d50ca26877e177f767c1 | Fix execution on Monday | flopezag/fiware-management-scripts,flopezag/fiware-management-scripts | management.py | management.py | from logging import _nameToLevel as nameToLevel
from argparse import ArgumentParser
from Common.emailer import Emailer
from DesksReminder.reminders import HelpDeskTechReminder, HelpDeskLabReminder, HelpDeskOtherReminder, \
UrgentDeskReminder, AccountsDeskReminder
from HelpDesk.synchronization import AskbotSync, HelpDeskCaretaker
from HelpDesk.stackoverflowsync import StackOverflowSync
from urllib3 import disable_warnings
from urllib3.exceptions import InsecureRequestWarning
from datetime import datetime
__author__ = 'Fernando López'
__version__ = "1.3.0"
def init():
parser = ArgumentParser(prog='Jira Management Scripts', description='')
parser.add_argument('-l',
'--log',
default='INFO',
help='The logging level to be used.')
args = parser.parse_args()
loglevel = None
try:
loglevel = nameToLevel[args.log.upper()]
except Exception as e:
print('Invalid log level: {}'.format(args.log))
print('Please use one of the following values:')
print(' * CRITICAL')
print(' * ERROR')
print(' * WARNING')
print(' * INFO')
print(' * DEBUG')
print(' * NOTSET')
exit()
return loglevel
if __name__ == "__main__":
loglevel = init()
mailer = Emailer(loglevel=loglevel)
disable_warnings(InsecureRequestWarning)
today = datetime.today().weekday()
if today == 0:
# Send reminder of pending JIRA tickets, only every Mondays
techReminder = HelpDeskTechReminder(loglevel=loglevel, mailer=mailer)
techReminder.process()
labReminder = HelpDeskLabReminder(loglevel=loglevel, mailer=mailer)
labReminder.process()
otherReminder = HelpDeskOtherReminder(loglevel=loglevel, mailer=mailer)
otherReminder.process()
urgentReminder = UrgentDeskReminder(loglevel=loglevel, mailer=mailer)
urgentReminder.process()
accountReminder = AccountsDeskReminder(loglevel=loglevel, mailer=mailer)
accountReminder.process()
# Askbot synchronization and Jira caretaker actions, every day
askbotSync = AskbotSync(loglevel=loglevel)
askbotSync.process()
# Automatic reassign tickets to owners based on some extracted information, every day
helpdeskCaretaker = HelpDeskCaretaker(loglevel=loglevel)
helpdeskCaretaker.process()
# StackoverFlow synchronization, every day
stackoverflowSync = StackOverflowSync(loglevel=loglevel)
stackoverflowSync.process(year=2015, month=9, day=21)
| from logging import _nameToLevel as nameToLevel
from argparse import ArgumentParser
from Common.emailer import Emailer
from DesksReminder.reminders import HelpDeskTechReminder, HelpDeskLabReminder, HelpDeskOtherReminder, \
UrgentDeskReminder, AccountsDeskReminder
from HelpDesk.synchronization import AskbotSync, HelpDeskCaretaker
from HelpDesk.stackoverflowsync import StackOverflowSync
from urllib3 import disable_warnings
from urllib3.exceptions import InsecureRequestWarning
from datetime import datetime
__author__ = 'Fernando López'
__version__ = "1.3.0"
def init():
parser = ArgumentParser(prog='Jira Management Scripts', description='')
parser.add_argument('-l',
'--log',
default='INFO',
help='The logging level to be used.')
args = parser.parse_args()
loglevel = None
try:
loglevel = nameToLevel[args.log.upper()]
except Exception as e:
print('Invalid log level: {}'.format(args.log))
print('Please use one of the following values:')
print(' * CRITICAL')
print(' * ERROR')
print(' * WARNING')
print(' * INFO')
print(' * DEBUG')
print(' * NOTSET')
exit()
return loglevel
if __name__ == "__main__":
loglevel = init()
mailer = Emailer(loglevel=loglevel)
disable_warnings(InsecureRequestWarning)
today = datetime.today().weekday()
if today == 2:
# Send reminder of pending JIRA tickets, only every Mondays
techReminder = HelpDeskTechReminder(loglevel=loglevel, mailer=mailer)
techReminder.process()
labReminder = HelpDeskLabReminder(loglevel=loglevel, mailer=mailer)
labReminder.process()
otherReminder = HelpDeskOtherReminder(loglevel=loglevel, mailer=mailer)
otherReminder.process()
urgentReminder = UrgentDeskReminder(loglevel=loglevel, mailer=mailer)
urgentReminder.process()
accountReminder = AccountsDeskReminder(loglevel=loglevel, mailer=mailer)
accountReminder.process()
# Askbot synchronization and Jira caretaker actions, every day
askbotSync = AskbotSync(loglevel=loglevel)
askbotSync.process()
# Automatic reassign tickets to owners based on some extracted information, every day
helpdeskCaretaker = HelpDeskCaretaker(loglevel=loglevel)
helpdeskCaretaker.process()
# StackoverFlow synchronization, every day
stackoverflowSync = StackOverflowSync(loglevel=loglevel)
stackoverflowSync.process(year=2015, month=9, day=21)
| apache-2.0 | Python |
ecd2821a99dee895f3ab7c5dbcc6d86983268560 | Update src url for dev in views | patrickbeeson/text-me | __init__.py | __init__.py | from flask import Flask, request, redirect, url_for
from twilio.rest import TwilioRestClient
from PIL import Image, ImageDraw, ImageFont
import time
app = Flask(__name__, static_folder='static', static_url_path='')
client = TwilioRestClient(
account='ACb01b4d6edfb1b41a8b80f5fed2c19d1a',
token='97e6b9c0074b2761eff1375fb088adda'
)
@app.route('/', methods=['GET', 'POST'])
def send_image():
if request.method == 'GET':
return 'The deployment worked! Now copy your browser URL into the' + \
' Twilio message text box for your phone number.'
sender_number = request.form.get('From', '')
twilio_number = request.form.get('To', '')
user_text = request.form.get('Body', '')
image_url, msg_text = mod_photo(user_text)
send_mms_twiml(image_url, msg_text, sender_number, twilio_number)
return 'ok'
def mod_photo(user_text):
base = Image.open('static/images/original/portland.jpg').convert('RGBA')
txt = Image.new('RGBA', base.size, (255, 255, 255, 0))
fnt = ImageFont.truetype('static/fonts/Gobold.ttf', 30)
d = ImageDraw.Draw(txt)
d.text(
(25, 25),
'{}...'.format(user_text),
font=fnt,
fill=(255, 255, 255, 255)
)
image = Image.alpha_composite(base, txt)
image.save('static/images/changed/portland_{}.jpg'.format(user_text))
try:
msg_text = '{}: Imagine yourself in Portland!'.format(user_text)
image_url = 'http://dev.thevariable.com/images/changed/portland_{}.jpg'.format(user_text)
except:
msg = "Sorry, we couldn't pull a kitten, " + \
"here's a dinosaur instead!"
image_url = "https://farm1.staticflickr.com/46/" + \
"154877897_a299d80baa_b_d.jpg"
return image_url, msg_text
def send_mms_twiml(image_url, msg_text, sender_number, twilio_number):
client.messages.create(
to=sender_number,
from_=twilio_number,
body=msg_text,
media_url=image_url
)
if __name__ == "__main__":
app.run(debug=True)
| from flask import Flask, request, redirect, url_for
from twilio.rest import TwilioRestClient
from PIL import Image, ImageDraw, ImageFont
import time
app = Flask(__name__, static_folder='static', static_url_path='')
client = TwilioRestClient(
account='ACb01b4d6edfb1b41a8b80f5fed2c19d1a',
token='97e6b9c0074b2761eff1375fb088adda'
)
@app.route('/', methods=['GET', 'POST'])
def send_image():
if request.method == 'GET':
return 'The deployment worked! Now copy your browser URL into the' + \
' Twilio message text box for your phone number.'
sender_number = request.form.get('From', '')
twilio_number = request.form.get('To', '')
user_text = request.form.get('Body', '')
image_url, msg_text = mod_photo(user_text)
send_mms_twiml(image_url, msg_text, sender_number, twilio_number)
return 'ok'
def mod_photo(user_text):
base = Image.open('static/images/original/portland.jpg').convert('RGBA')
txt = Image.new('RGBA', base.size, (255, 255, 255, 0))
fnt = ImageFont.truetype('static/fonts/Gobold.ttf', 30)
d = ImageDraw.Draw(txt)
d.text(
(25, 25),
'{}...'.format(user_text),
font=fnt,
fill=(255, 255, 255, 255)
)
image = Image.alpha_composite(base, txt)
image.save('static/images/changed/portland_{}.jpg'.format(user_text))
try:
msg_text = '{}: Imagine yourself in Portland!'.format(user_text)
image_url = 'http://12dcb913.ngrok.com/images/changed/portland_{}.jpg'.format(user_text)
except:
msg = "Sorry, we couldn't pull a kitten, " + \
"here's a dinosaur instead!"
image_url = "https://farm1.staticflickr.com/46/" + \
"154877897_a299d80baa_b_d.jpg"
return image_url, msg_text
def send_mms_twiml(image_url, msg_text, sender_number, twilio_number):
client.messages.create(
to=sender_number,
from_=twilio_number,
body=msg_text,
media_url=image_url
)
if __name__ == "__main__":
app.run(debug=True)
| mit | Python |
598bb39414825ff8ab561babb470b85f06c58020 | Update __init__.py | scipsycho/mlpack | __init__.py | __init__.py | from mlpack.linear_regression import linear_regression
from mlpack.logistic_regression import logistic_regression
"""
MlPack
======
Provides
1. A Variety of Machine learning packages
2. Good and Easy hand written programs with good documentation
3. Linear Regression, Logistic Regression
Available subpackages
---------------------
1. Linear Regression
2. Logistic Regression
See subpackages for more details.
"""
| from mlpack import linear_regression
from mlpack import logistic_regression
"""
MlPack
======
Provides
1. A Variety of Machine learning packages
2. Good and Easy hand written programs with good documentation
3. Linear Regression, Logistic Regression
Available subpackages
---------------------
1. Linear Regression
2. Logistic Regression
See subpackages for more details.
"""
| mit | Python |
b8d0344f0ca5c906e43d4071bc27a8d2acf114d1 | bump version | wistful/webmpris | webmpris/__init__.py | webmpris/__init__.py | __version__ = '1.1'
__description__ = 'REST API to control media players via MPRIS2 interfaces'
requires = [
'pympris'
]
README = """webmpris is a REST API
to control media players via MPRIS2 interfaces.
Supported intefaces:
org.mpris.MediaPlayer2 via /players/<id>/Root
org.mpris.MediaPlayer2.Player via /players/<id>/Player
org.mpris.MediaPlayer2.TrackList via /players/<id>/TrackList
org.mpris.MediaPlayer2.Playlists via /players/<id>/Playlists
"""
| __version__ = '1.0'
__description__ = 'REST API to control media players via MPRIS2 interfaces'
requires = [
'pympris'
]
README = """webmpris is a REST API
to control media players via MPRIS2 interfaces.
Supported intefaces:
org.mpris.MediaPlayer2 via /players/<id>/Root
org.mpris.MediaPlayer2.Player via /players/<id>/Player
org.mpris.MediaPlayer2.TrackList via /players/<id>/TrackList
org.mpris.MediaPlayer2.Playlists via /players/<id>/Playlists
"""
| mit | Python |
9acf7857167bb87438c7c0bebca1a7eda93ac23b | Make saml2idp compatible with Django 1.9 | mobify/dj-saml-idp,mobify/dj-saml-idp,mobify/dj-saml-idp | saml2idp/registry.py | saml2idp/registry.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
Registers and loads Processor classes from settings.
"""
import logging
from importlib import import_module
from django.core.exceptions import ImproperlyConfigured
from . import exceptions
from . import saml2idp_metadata
logger = logging.getLogger(__name__)
def get_processor(config):
"""
Get an instance of the processor with config.
"""
dottedpath = config['processor']
try:
dot = dottedpath.rindex('.')
except ValueError:
raise ImproperlyConfigured('%s isn\'t a processors module' % dottedpath)
sp_module, sp_classname = dottedpath[:dot], dottedpath[dot+1:]
try:
mod = import_module(sp_module)
except ImportError, e:
raise ImproperlyConfigured('Error importing processors %s: "%s"' % (sp_module, e))
try:
sp_class = getattr(mod, sp_classname)
except AttributeError:
raise ImproperlyConfigured('processors module "%s" does not define a "%s" class' % (sp_module, sp_classname))
instance = sp_class(config)
return instance
def find_processor(request):
"""
Returns the Processor instance that is willing to handle this request.
"""
for name, sp_config in saml2idp_metadata.SAML2IDP_REMOTES.items():
proc = get_processor(sp_config)
try:
if proc.can_handle(request):
return proc
except exceptions.CannotHandleAssertion as exc:
# Log these, but keep looking.
logger.debug('%s %s' % (proc, exc))
raise exceptions.CannotHandleAssertion('None of the processors in SAML2IDP_REMOTES could handle this request.')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
Registers and loads Processor classes from settings.
"""
# Python imports
import logging
# Django imports
from django.utils.importlib import import_module
from django.core.exceptions import ImproperlyConfigured
# Local imports
from . import exceptions
from . import saml2idp_metadata
# Setup logging
logger = logging.getLogger(__name__)
def get_processor(config):
"""
Get an instance of the processor with config.
"""
dottedpath = config['processor']
try:
dot = dottedpath.rindex('.')
except ValueError:
raise ImproperlyConfigured('%s isn\'t a processors module' % dottedpath)
sp_module, sp_classname = dottedpath[:dot], dottedpath[dot+1:]
try:
mod = import_module(sp_module)
except ImportError, e:
raise ImproperlyConfigured('Error importing processors %s: "%s"' % (sp_module, e))
try:
sp_class = getattr(mod, sp_classname)
except AttributeError:
raise ImproperlyConfigured('processors module "%s" does not define a "%s" class' % (sp_module, sp_classname))
instance = sp_class(config)
return instance
def find_processor(request):
"""
Returns the Processor instance that is willing to handle this request.
"""
for name, sp_config in saml2idp_metadata.SAML2IDP_REMOTES.items():
proc = get_processor(sp_config)
try:
if proc.can_handle(request):
return proc
except exceptions.CannotHandleAssertion as exc:
# Log these, but keep looking.
logger.debug('%s %s' % (proc, exc))
raise exceptions.CannotHandleAssertion('None of the processors in SAML2IDP_REMOTES could handle this request.')
| mit | Python |
b8cd1b6869651cd0cbe2cbeebc59c641f13e0e5b | Add todo for scopes permissions | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/scopes/permissions/scopes.py | polyaxon/scopes/permissions/scopes.py | from scopes.authentication.ephemeral import is_ephemeral_user
from scopes.authentication.internal import is_internal_user
from scopes.permissions.base import PolyaxonPermission
class ScopesPermission(PolyaxonPermission):
"""
Scopes based Permissions, depends on the authentication backend.
"""
ENTITY = None
SCOPE_MAPPING = None
@staticmethod
def _check_internal_or_ephemeral(request):
return any([is_ephemeral_user(request.user), is_internal_user(request.user)])
def has_permission(self, request, view):
if not request.auth:
if not request.user.is_authenticated:
return False
# Session users are granted total access
return True
# TODO Add internal/ephemeral here
# (if that type of auth is allowed, then we should not check he scope)
if request.user.is_authenticated and request.user.is_superuser:
return True
allowed_scopes = set(self.SCOPE_MAPPING.get(request.method, []))
if not allowed_scopes:
return True
current_scopes = request.auth.scopes
return any(s in allowed_scopes for s in current_scopes)
| from scopes.authentication.ephemeral import is_ephemeral_user
from scopes.authentication.internal import is_internal_user
from scopes.permissions.base import PolyaxonPermission
class ScopesPermission(PolyaxonPermission):
"""
Scopes based Permissions, depends on the authentication backend.
"""
ENTITY = None
SCOPE_MAPPING = None
@staticmethod
def _check_internal_or_ephemeral(request):
return any([is_ephemeral_user(request.user), is_internal_user(request.user)])
def has_permission(self, request, view):
if not request.auth:
if not request.user.is_authenticated:
return False
# Session users are granted total access
return True
if request.user.is_authenticated and request.user.is_superuser:
return True
allowed_scopes = set(self.SCOPE_MAPPING.get(request.method, []))
if not allowed_scopes:
return True
current_scopes = request.auth.scopes
return any(s in allowed_scopes for s in current_scopes)
| apache-2.0 | Python |
c202a3a945453a4955f0acbf369227f8c9cee148 | Rename link in init | analysiscenter/dataset | __init__.py | __init__.py | import os
from .batchflow import *
__path__ = [os.path.join(os.path.dirname(__file__), 'batchflow')]
| import os
from .dataset import *
__path__ = [os.path.join(os.path.dirname(__file__), 'dataset')]
| apache-2.0 | Python |
4a4731eda22170a77bb24dd3c7fc8ff4cafecf9d | bump version to 2.7b1 | pypa/setuptools,pypa/setuptools,pypa/setuptools | __init__.py | __init__.py | """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
__revision__ = "$Id$"
# Distutils version
#
# Updated automatically by the Python release process.
#
#--start constants--
__version__ = "2.7b1"
#--end constants--
| """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
__revision__ = "$Id$"
# Distutils version
#
# Updated automatically by the Python release process.
#
#--start constants--
__version__ = "2.7a4"
#--end constants--
| mit | Python |
86eb16da4a6c3579eb514fa5ca73def7be8afd84 | Add noqa codestyle | GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek | geotrek/api/v2/views/__init__.py | geotrek/api/v2/views/__init__.py | from rest_framework import response, permissions
from rest_framework.views import APIView
from django.conf import settings
from django.contrib.gis.geos import Polygon
from .authent import StructureViewSet # noqa
from .common import TargetPortalViewSet, ThemeViewSet, SourceViewSet, ReservationSystemViewSet, LabelViewSet, OrganismViewSet # noqa
if 'geotrek.core' in settings.INSTALLED_APPS:
from .core import PathViewSet # noqa
if 'geotrek.feedback' in settings.INSTALLED_APPS:
from .feedback import ReportStatusViewSet, ReportActivityViewSet, ReportCategoryViewSet, ReportProblemMagnitudeViewSet # noqa
if 'geotrek.trekking' in settings.INSTALLED_APPS:
from .trekking import (TrekViewSet, TourViewSet, POIViewSet, POITypeViewSet, AccessibilityViewSet, RouteViewSet, # noqa
DifficultyViewSet, NetworkViewSet, PracticeViewSet, # noqa
WebLinkCategoryViewSet, ServiceTypeViewSet, ServiceViewSet, TrekRatingScaleViewSet, TrekRatingViewSet) # noqa
if 'geotrek.sensitivity' in settings.INSTALLED_APPS:
from .sensitivity import SensitiveAreaViewSet # noqa
from .sensitivity import SportPracticeViewSet # noqa
from .sensitivity import SpeciesViewSet # noqa
if 'geotrek.tourism' in settings.INSTALLED_APPS:
from .tourism import TouristicContentViewSet, TouristicEventViewSet, TouristicEventTypeViewSet, InformationDeskViewSet, TouristicContentCategoryViewSet # noqa
if 'geotrek.zoning' in settings.INSTALLED_APPS:
from .zoning import CityViewSet, DistrictViewSet # noqa
if 'geotrek.outdoor' in settings.INSTALLED_APPS:
from .outdoor import (SiteViewSet, OutdoorPracticeViewSet, SiteTypeViewSet, CourseTypeViewSet, # noqa
OutdoorRatingScaleViewSet, OutdoorRatingViewSet, CourseViewSet, SectorViewSet) # noqa
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
from .flatpages import FlatPageViewSet # noqa
if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
from .infrastructure import InfrastructureTypeViewSet, InfrastructureViewSet, InfrastructureUsageDifficultyLevelViewSet, InfrastructureConditionViewSet, InfrastructureMaintenanceDifficultyLevelViewSet # noqa
if 'geotrek.signage' in settings.INSTALLED_APPS:
from .signage import SignageViewSet, SignageTypeViewSet, SealingViewSet, ColorViewSet, DirectionViewSet, BladeTypeViewSet # noqa
if 'drf_yasg' in settings.INSTALLED_APPS:
from .swagger import schema_view # noqa
class ConfigView(APIView):
"""
Configuration endpoint that gives the BBox used in the Geotrek configuration
"""
permission_classes = [permissions.AllowAny, ]
def get(self, request, *args, **kwargs):
bbox = Polygon.from_bbox(settings.SPATIAL_EXTENT)
bbox.srid = settings.SRID
bbox.transform(settings.API_SRID)
return response.Response({
'bbox': bbox.extent
})
| from rest_framework import response, permissions
from rest_framework.views import APIView
from django.conf import settings
from django.contrib.gis.geos import Polygon
from .authent import StructureViewSet # noqa
from .common import TargetPortalViewSet, ThemeViewSet, SourceViewSet, ReservationSystemViewSet, LabelViewSet, OrganismViewSet # noqa
if 'geotrek.core' in settings.INSTALLED_APPS:
from .core import PathViewSet # noqa
if 'geotrek.feedback' in settings.INSTALLED_APPS:
from .feedback import ReportStatusViewSet, ReportActivityViewSet, ReportCategoryViewSet, ReportProblemMagnitudeViewSet # noqa
if 'geotrek.trekking' in settings.INSTALLED_APPS:
from .trekking import (TrekViewSet, TourViewSet, POIViewSet, POITypeViewSet, AccessibilityViewSet, RouteViewSet,
DifficultyViewSet, NetworkViewSet, PracticeViewSet,
WebLinkCategoryViewSet, ServiceTypeViewSet, ServiceViewSet, TrekRatingScaleViewSet, TrekRatingViewSet) # noqa
if 'geotrek.sensitivity' in settings.INSTALLED_APPS:
from .sensitivity import SensitiveAreaViewSet # noqa
from .sensitivity import SportPracticeViewSet # noqa
from .sensitivity import SpeciesViewSet # noqa
if 'geotrek.tourism' in settings.INSTALLED_APPS:
from .tourism import TouristicContentViewSet, TouristicEventViewSet, TouristicEventTypeViewSet, InformationDeskViewSet, TouristicContentCategoryViewSet # noqa
if 'geotrek.zoning' in settings.INSTALLED_APPS:
from .zoning import CityViewSet, DistrictViewSet # noqa
if 'geotrek.outdoor' in settings.INSTALLED_APPS:
from .outdoor import (SiteViewSet, OutdoorPracticeViewSet, SiteTypeViewSet, CourseTypeViewSet,
OutdoorRatingScaleViewSet, OutdoorRatingViewSet, CourseViewSet, SectorViewSet) # noqa
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
from .flatpages import FlatPageViewSet # noqa
if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
from .infrastructure import InfrastructureTypeViewSet, InfrastructureViewSet, InfrastructureUsageDifficultyLevelViewSet, InfrastructureConditionViewSet, InfrastructureMaintenanceDifficultyLevelViewSet # noqa
if 'geotrek.signage' in settings.INSTALLED_APPS:
from .signage import SignageViewSet, SignageTypeViewSet, SealingViewSet, ColorViewSet, DirectionViewSet, BladeTypeViewSet # noqa
if 'drf_yasg' in settings.INSTALLED_APPS:
from .swagger import schema_view # noqa
class ConfigView(APIView):
"""
Configuration endpoint that gives the BBox used in the Geotrek configuration
"""
permission_classes = [permissions.AllowAny, ]
def get(self, request, *args, **kwargs):
bbox = Polygon.from_bbox(settings.SPATIAL_EXTENT)
bbox.srid = settings.SRID
bbox.transform(settings.API_SRID)
return response.Response({
'bbox': bbox.extent
})
| bsd-2-clause | Python |
f9a1da6e60bfbd9c9e5be769f1223d628cec6481 | set the module version | brain-tec/connector,acsone/connector,hugosantosred/connector,brain-tec/connector,anybox/connector,Endika/connector,sylvain-garancher/connector,gurneyalex/connector,maljac/connector,maljac/connector,mohamedhagag/connector,esousy/connector,MindAndGo/connector,open-synergy/connector,acsone/connector,dvitme/connector,MindAndGo/connector,Antiun/connector,BT-ojossen/connector,fevxie/connector,js-landoo/connector,zhaohuaw/connector,BT-jmichaud/connector,fevxie/connector,mohamedhagag/connector,sylvain-garancher/connector,hugosantosred/connector,guewen/connector,Endika/connector,open-synergy/connector,BT-fgarbely/connector,js-landoo/connector,anybox/connector,acsone/connector,brain-tec/connector,BT-jmichaud/connector,esousy/connector,Antiun/connector,zhaohuaw/connector,guewen/connector,gurneyalex/connector,dvitme/connector,BT-ojossen/connector,BT-fgarbely/connector | base_external_referentials/__openerp__.py | base_external_referentials/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Akretion (<http://www.akretion.com>). All Rights Reserved
# authors: Raphaël Valyi, Sharoon Thomas
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base External Referentials',
'version': '6.1.0',
'category': 'Generic Modules/Base',
'description': """
Definition : a referential is an external system that will interacts with OpenERP
Goal : store external system connection details and objects fields mapping
This module provide an abstract common minimal base to add any additional external id columns
to some OpenObject table, pointing to some external referential.
A referential is abstract and minimal at this stage, it only has:
* a name
* a location (possibly webservice URL, database connection URL...); the connection method will tell it...
* referential credentials (user name + password)
* placeholders for custom in and out mapping for OpenERP object fields.
OpenERP already has limited supported to external ids using the ir_model_data and the id
fields in the loaded data such as XML or CSV. We think that's OK to store all referential ids
into the same ir_model_data table: yes it makes it large, but synchronisation operations involve
a network bottleneck anyway, so it's largely OK and negligible to have a large table here.
The existing ir_model_data feature of OpenERP is mostly thought as an mono-external referential
(even if the module key of ir_model_data plays some referential scoping role). Here we just push
the concept further to assume multiple external ids for OpenERP entities and add the possibility
to customize their field mapping directly in OpenERP to accomodate the external systems.
""",
'author': 'Raphaël Valyi (Akretion.com), Sharoon Thomas (Openlabs.co.in)',
'website': 'http://www.akretion.com, http://openlabs.co.in/',
'depends': ['base','base_pop_up', 'base_file_protocole', 'email_template'],
'init_xml': [],
'update_xml': [
'external_referentials_view.xml',
'report_view.xml',
'external_referentials_menu.xml',
'security/ir.model.access.csv',
'group_fields_view.xml',
'security/base_external_referentials_security.xml',
'report_mail_template.xml',
],
'demo_xml': [],
'installable': True,
'certificate': '',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Akretion (<http://www.akretion.com>). All Rights Reserved
# authors: Raphaël Valyi, Sharoon Thomas
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base External Referentials',
'version': '1.0',
'category': 'Generic Modules/Base',
'description': """
Definition : a referential is an external system that will interacts with OpenERP
Goal : store external system connection details and objects fields mapping
This module provide an abstract common minimal base to add any additional external id columns
to some OpenObject table, pointing to some external referential.
A referential is abstract and minimal at this stage, it only has:
* a name
* a location (possibly webservice URL, database connection URL...); the connection method will tell it...
* referential credentials (user name + password)
* placeholders for custom in and out mapping for OpenERP object fields.
OpenERP already has limited supported to external ids using the ir_model_data and the id
fields in the loaded data such as XML or CSV. We think that's OK to store all referential ids
into the same ir_model_data table: yes it makes it large, but synchronisation operations involve
a network bottleneck anyway, so it's largely OK and negligible to have a large table here.
The existing ir_model_data feature of OpenERP is mostly thought as an mono-external referential
(even if the module key of ir_model_data plays some referential scoping role). Here we just push
the concept further to assume multiple external ids for OpenERP entities and add the possibility
to customize their field mapping directly in OpenERP to accomodate the external systems.
""",
'author': 'Raphaël Valyi (Akretion.com), Sharoon Thomas (Openlabs.co.in)',
'website': 'http://www.akretion.com, http://openlabs.co.in/',
'depends': ['base','base_pop_up', 'base_file_protocole', 'email_template'],
'init_xml': [],
'update_xml': [
'external_referentials_view.xml',
'report_view.xml',
'external_referentials_menu.xml',
'security/ir.model.access.csv',
'group_fields_view.xml',
'security/base_external_referentials_security.xml',
'report_mail_template.xml',
],
'demo_xml': [],
'installable': True,
'certificate': '',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
6eeb2b4f79c2f735552cf7c061b48425d3299e51 | Use argparse. | nbeaver/equajson | validate_equajson.py | validate_equajson.py | #! /usr/bin/env python3
import json
import jsonschema
import sys
import os
import argparse
def main(equajson_path, schema_path):
global filepath
filepath = equajson_path
with open(schema_path) as schema_file:
try:
equajson_schema = json.load(schema_file)
except:
sys.stderr.write("Invalid JSON in schema: `"+schema_file.name+"'"+'\n')
raise
with open(equajson_path) as json_file:
try:
equajson = json.load(json_file)
except:
sys.stderr.write("Invalid JSON in file: `"+json_file.name+"'"+'\n')
raise
try:
jsonschema.validate(equajson, equajson_schema)
except jsonschema.exceptions.ValidationError:
sys.stderr.write(json_file.name+'\n')
raise
basename_no_extension = os.path.splitext(os.path.basename(json_file.name))[0]
# It's easier to make this a global variable
# than to thread it through every function.
filepath = None
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='validate equajson files')
parser.add_argument(
'-s',
'--schema',
help='path to schema file',
required=True
)
parser.add_argument(
'json_file',
help='path to json file to validate'
)
args = parser.parse_args()
main(args.json_file, args.schema)
| #! /usr/bin/env python3
import json
import jsonschema
import sys
import os
def main(equajson_path, schema_path):
global filepath
filepath = equajson_path
with open(schema_path) as schema_file:
try:
equajson_schema = json.load(schema_file)
except:
sys.stderr.write("Invalid JSON in schema: `"+schema_file.name+"'"+'\n')
raise
with open(equajson_path) as json_file:
try:
equajson = json.load(json_file)
except:
sys.stderr.write("Invalid JSON in file: `"+json_file.name+"'"+'\n')
raise
try:
jsonschema.validate(equajson, equajson_schema)
except jsonschema.exceptions.ValidationError:
sys.stderr.write(json_file.name+'\n')
raise
basename_no_extension = os.path.splitext(os.path.basename(json_file.name))[0]
# It's easier to make this a global variable
# than to thread it through every function.
filepath = None
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 2:
sys.stderr.write("Usage: python "+sys.argv[0]+" equajson.json schema.json"+'\n')
sys.exit(1)
main(sys.argv[1], sys.argv[2])
| mit | Python |
e6cb1617e588d6b276fe01c401f2c1b34cf88d5f | fix stuff | dborstelmann/Penguins-GH6,dborstelmann/Penguins-GH6,dborstelmann/Penguins-GH6 | api/read.py | api/read.py | import datetime
from django.http import JsonResponse
from dateutil.parser import parse
from django.contrib.auth.decorators import login_required
from api.models import ( Applicant, Client, Disabilities, EmploymentEducation,
Enrollment, HealthAndDV, IncomeBenefits, Services )
def get_applicants(request):
applicant = {}
return JsonResponse(applicant)
def search_clients(request):
'''
request.POST =
query
'''
clients = Client.objects.all()
if 'query' in request.POST:
q = request.POST['query']
if q.isdigit():
clients = clients.filter(uuid=q)
else:
clients = clients.filter(last_name__contains=q)
return JsonResponse([{
"first_name": c.first_name,
"middle_name": c.middle_name,
"last_name": c.last_name,
"social_security": c.social_security,
"date_of_birth": datetime.datetime.strftime(c.date_of_birth, '%m/%d/%Y'),
"ethnicity": 1,
"gender": 1,
"veteran": 1,
"year_entered": c.year_entered,
"year_exited": c.year_exited,
"date_created": c.date_created
} for c in clients], safe=False)
def get_applicants(request):
app_list = Applicant.objects.all()
applicant = [{
"id": c.id,
"first_name": c.first_name,
"last_name": c.last_name,
"why": c.why,
"phone": c.phone,
"email": c.emial,
"address": c.address,
"birthday": c.birthday,
"ethnicity": value_maps.ethnicity[c.ethnicity],
"gender": value_maps.gender[c.gender],
"veteran": value_maps.veteran[c.veteran],
"family": c.family,
"domestic_violence": value_maps.domestic_violence[c.domestic_violence],
"pregnancy": c.pregnancy,
"drug": c.drug,
"urgency": c.urgency,
"created": c.created,
"reviewed": c.reviewed,
} for c in app_list]
return JsonResponse(applicant, safe=False)
| import datetime
from django.http import JsonResponse
from dateutil.parser import parse
from django.contrib.auth.decorators import login_required
from api.models import ( Applicant, Client, Disabilities, EmploymentEducation,
Enrollment, HealthAndDV, IncomeBenefits, Services )
def get_applicants(request):
applicant = {}
return JsonResponse(applicant)
def search_clients(request):
'''
request.POST =
query
'''
clients = Client.objects.all()
if 'query' in request.POST:
q = request.POST['query']
if q.isdigit():
clients = clients.filter(uuid=q)
else:
clients = clients.filter(last_name__contains=q)
return JsonResponse([{
"first_name": c.first_name,
"middle_name": c.middle_name,
"last_name": c.last_name,
"social_security": c.social_security,
"date_of_birth": datetime.datetime.strftime(c.date_of_birth, '%m/%d/%Y'),
"ethnicity": 1,
"gender": 1,
"veteran": 1,
"year_entered": c.year_entered,
"year_exited": c.year_exited,
"date_created": c.date_created
} for c in clients], safe=False)
<<<<<<< Updated upstream
def get_applicants(request):
app_list = Applicant.objects.all()
applicant = [{
"id": c.id,
"first_name": c.first_name,
"last_name": c.last_name,
"why": c.why,
"phone": c.phone,
"email": c.emial,
"address": c.address,
"birthday": c.birthday,
"ethnicity": value_maps.ethnicity[c.ethnicity],
"gender": value_maps.gender[c.gender],
"veteran": value_maps.veteran[c.veteran],
"family": c.family,
"domestic_violence": value_maps.domestic_violence[c.domestic_violence],
"pregnancy": c.pregnancy,
"drug": c.drug,
"urgency": c.urgency,
"created": c.created,
"reviewed": c.reviewed,
} for c in app_list]
return JsonResponse(applicant, safe=False)
| mit | Python |
ae7b583cab8d38b04ce57571f50221b4a2e429f6 | Update base.py | raiderrobert/django-webhook | webhook/base.py | webhook/base.py | """
Base webhook implementation
"""
import json
from django.http import HttpResponse
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
class WebhookBase(View):
"""
Simple Webhook base class to handle the most standard case.
"""
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super(WebhookBase, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
data = json.loads(request.body.decode('utf-8'))
self.process_webhook(data)
return HttpResponse(status=200)
def process_webhook(self, data=None):
"""
Unimplemented method
"""
raise NotImplementedError
| """
Base webhook implementation
"""
import json
from django.http import HttpResponse
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
class WebhookBase(View):
"""
Simple Webhook base class to handle the most standard case.
"""
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super(WebhookBase, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
data = json.loads(request.body.decode('utf-8'))
self.process_webhook(data)
return HttpResponse(status=200)
def process_webhook(self, data):
"""
Unimplemented method
"""
raise NotImplementedError
| mit | Python |
46b860e93d8a9e8dda3499b7306e30ebcd0e0174 | handle session stopped | rohitw1991/frappe,gangadharkadam/tailorfrappe,indictranstech/trufil-frappe,gangadharkadam/vervefrappe,letzerp/framework,gangadhar-kadam/verve_frappe,RicardoJohann/frappe,vjFaLk/frappe,gangadharkadam/v5_frappe,rohitwaghchaure/frappe,suyashphadtare/propshikhari-frappe,saurabh6790/frappe,indictranstech/ebuy-now-frappe,gangadhar-kadam/prjlib,indictranstech/tele-frappe,elba7r/frameworking,gangadharkadam/saloon_frappe_install,saurabh6790/pow-lib,saurabh6790/test_final_med_lib,almeidapaulopt/frappe,deveninfotech/deven-frappe,manassolanki/frappe,saurabh6790/omn-lib,rohitw1991/smartfrappe,RicardoJohann/frappe,MaxMorais/frappe,webnotes/wnframework,gangadharkadam/office_frappe,adityahase/frappe,saurabh6790/tru_lib_back,gangadharkadam/vlinkfrappe,indictranstech/ebuy-now-frappe,BhupeshGupta/frappe,saurabh6790/test-frappe,tmimori/frappe,maxtorete/frappe,Amber-Creative/amber-frappe,saurabh6790/-aimobilize-lib,reachalpineswift/frappe-bench,vCentre/vFRP-6233,gangadharkadam/vlinkfrappe,gangadharkadam/vlinkfrappe,indictranstech/internal-frappe,Amber-Creative/amber-frappe,pawaranand/phr_frappe,indictranstech/omnitech-frappe,elba7r/frameworking,rohitwaghchaure/vestasi-frappe,tundebabzy/frappe,suyashphadtare/sajil-frappe,paurosello/frappe,bohlian/frappe,drukhil/frappe,indictranstech/ebuy-now-frappe,vjFaLk/frappe,gangadharkadam/vervefrappe,rmehta/frappe,anandpdoshi/frappe,gangadharkadam/saloon_frappe,gangadhar-kadam/hrfrappe,rohitwaghchaure/frappe-alec,sbkolate/sap_frappe_v6,rohitwaghchaure/frappe-digitales,saurabh6790/medlib,rkawale/Internalhr-frappe,sbktechnology/trufil-frappe,indictranstech/phr-frappe,MaxMorais/frappe,neilLasrado/frappe,saurabh6790/test-frappe,indictranstech/fbd_frappe,saurabh6790/pow-lib,jevonearth/frappe,gangadharkadam/johnfrappe,gangadharkadam/letzfrappe,mbauskar/omnitech-frappe,rkawale/Internalhr-frappe,praba230890/frappe,saurabh6790/medsynaptic1-lib,saguas/frappe,saurabh6790/test-med-lib,ashokrajb
athu/secondrep,sbktechnology/sap_frappe,StrellaGroup/frappe,saurabh6790/omnit-lib,gangadharkadam/smrtfrappe,hatwar/buyback-frappe,paurosello/frappe,rohitwaghchaure/frappe-digitales,suyashphadtare/propshikhari-frappe,ashokrajbathu/secondrep,gangadharkadam/v5_frappe,gangadharkadam/vervefrappe,sbktechnology/sap_frappe,saurabh6790/phr-frappe,tmimori/frappe,indictranstech/omnitech-frappe,shitolepriya/test-frappe,rohitwaghchaure/frappe-digitales,gangadharkadam/vervefrappe,gangadhar-kadam/smrterpfrappe,bohlian/frappe,mbauskar/tele-frappe,indictranstech/frappe,indautgrp/frappe,paurosello/frappe,deveninfotech/deven-frappe,mbauskar/frappe,pawaranand/phr_frappe,neilLasrado/frappe,rohitwaghchaure/New_Theme_frappe,mbauskar/phr-frappe,saurabh6790/test-med-lib,saurabh6790/trufil_lib,saurabh6790/medsynaptic1-lib,Tejal011089/digitales_frappe,erpletzerp/letzerpcore,shitolepriya/test-frappe,jevonearth/frappe,bohlian/frappe,gangadhar-kadam/smrterpfrappe,ESS-LLP/frappe,pranalik/parjanalib,bcornwellmott/frappe,mbauskar/helpdesk-frappe,rohitwaghchaure/frappe-alec,saurabh6790/ON-RISLIB,jevonearth/frappe,gangadharkadam/v5_frappe,indictranstech/osmosis-frappe,gangadharkadam/office_frappe,Tejal011089/digitales_frappe,shitolepriya/test-frappe,sbktechnology/sap_frappe,gangadhar-kadam/verve_frappe,indictranstech/fbd_frappe,gangadharkadam/smrtfrappe,reachalpineswift/frappe-bench,rohitwaghchaure/frappe-digitales,vCentre/vFRP-6233,saurabh6790/aimobilize-lib-backup,pombredanne/frappe,saurabh6790/omnisys-lib,indictranstech/reciphergroup-frappe,indictranstech/internal-frappe,gangadharkadam/stfrappe,indictranstech/osmosis-frappe,gangadhar-kadam/verve_frappe,rohitwaghchaure/frappe-alec,saurabh6790/med_test_lib,ShashaQin/frappe,maxtorete/frappe,gangadhar-kadam/prjlib,vqw/frappe,indictranstech/reciphergroup-frappe,rohitwaghchaure/frappe,saurabh6790/omnisys-lib,nerevu/frappe,gangadharkadam/saloon_frappe,gangadharkadam/frappecontribution,gangadharkadam/frappecontribution,saurabh6790/medsynaptic-lib,erpletze
rp/letzerpcore,chdecultot/frappe,shitolepriya/test-frappe,adityahase/frappe,vqw/frappe,hatwar/buyback-frappe,vqw/frappe,maxtorete/frappe,saurabh6790/omnitech-lib,indictranstech/tele-frappe,PriyaShitole/MedViger-lib,indictranstech/osmosis-frappe,tundebabzy/frappe,saurabh6790/med_lib_rels,saurabh6790/omnisys-lib,nerevu/frappe,mbauskar/frappe,Amber-Creative/amber-frappe,gangadhar-kadam/verve_frappe,gangadharkadam/shfr,mbauskar/omnitech-frappe,gangadharkadam/v5_frappe,aboganas/frappe,gangadhar-kadam/verve_live_frappe,suyashphadtare/sajil-final-frappe,deveninfotech/deven-frappe,vqw/frappe,bcornwellmott/frappe,pawaranand/phr-frappe,vCentre/vFRP-6233,MaxMorais/frappe,elba7r/frameworking,mbauskar/frappe,bohlian/frappe,saurabh6790/medsynaptic-lib,saurabh6790/med_new_lib,ShashaQin/frappe,sbkolate/sap_frappe_v6,saurabh6790/tru_lib_back,mbauskar/omnitech-demo-frappe,saurabh6790/med_test_lib,mbauskar/Das_frappe,saurabh6790/omnitech-libs,vCentre/vFRP-6233,aboganas/frappe,mbauskar/phr-frappe,mbauskar/phr-frappe,neilLasrado/frappe,geo-poland/frappe,saurabh6790/omnitech-lib,nabinhait/frappe,manassolanki/frappe,saurabh6790/alert-med-lib,mhbu50/frappe,rohitwaghchaure/frappe,gangadhar-kadam/verve_test_frappe,gangadharkadam/v4_frappe,indictranstech/phr-frappe,suyashphadtare/sajil-frappe,saurabh6790/test-frappe,mbauskar/phr-frappe,letzerp/framework,saurabh6790/medsyn-lib,indictranstech/frappe-digitales,cadencewatches/frappe,gangadhar-kadam/laganfrappe,pombredanne/frappe,yashodhank/frappe,pawaranand/phr_frappe,mbauskar/omnitech-demo-frappe,frappe/frappe,mbauskar/tele-frappe,hernad/frappe,indictranstech/fbd_frappe,gangadharkadam/saloon_frappe_install,mbauskar/helpdesk-frappe,suyashphadtare/sajil-final-frappe,gangadharkadam/v4_frappe,jevonearth/frappe,anandpdoshi/frappe,hernad/frappe,pranalik/frappe-bb,ShashaQin/frappe,ESS-LLP/frappe,rohitw1991/frappe,manassolanki/frappe,saurabh6790/-aimobilize-lib,indictranstech/reciphergroup-frappe,indictranstech/tele-frappe,indictranstech/frappe,bcornwel
lmott/frappe,suyashphadtare/propshikhari-frappe,saguas/frappe,gangadharkadam/v6_frappe,pombredanne/frappe,saurabh6790/frappe,rohitwaghchaure/New_Theme_frappe,indictranstech/internal-frappe,saurabh6790/aimobilize-lib-backup,indictranstech/frappe,rohitwaghchaure/frappe,indictranstech/trufil-frappe,indictranstech/Das_frappe,gangadhar-kadam/verve_test_frappe,webnotes/wnframework,gangadhar-kadam/nassimlib,gangadhar-kadam/verve_test_frappe,BhupeshGupta/frappe,mbauskar/tele-frappe,mbauskar/Das_frappe,StrellaGroup/frappe,gangadharkadam/letzfrappe,rohitw1991/smartfrappe,tmimori/frappe,saguas/frappe,Tejal011089/digitales_frappe,indictranstech/Das_frappe,RicardoJohann/frappe,suyashphadtare/sajil-frappe,paurosello/frappe,rohitwaghchaure/New_Theme_frappe,indictranstech/fbd_frappe,indictranstech/omnitech-frappe,almeidapaulopt/frappe,StrellaGroup/frappe,gangadhar-kadam/verve_test_frappe,sbktechnology/trufil-frappe,saurabh6790/medlib,webnotes/wnframework,gangadharkadam/saloon_frappe,MaxMorais/frappe,rohitwaghchaure/vestasi-frappe,saurabh6790/OFF-RISLIB,indictranstech/frappe-digitales,vjFaLk/frappe,saurabh6790/alert-med-lib,saurabh6790/frappe,saurabh6790/med_lib_test,saurabh6790/frappe,pranalik/frappe-bb,Tejal011089/medsyn2_lib,gangadharkadam/shfr,sbktechnology/trufil-frappe,drukhil/frappe,hernad/frappe,tmimori/frappe,letzerp/framework,gangadharkadam/v6_frappe,gangadharkadam/saloon_frappe_install,chdecultot/frappe,mbauskar/Das_frappe,pawaranand/phr-frappe,suyashphadtare/propshikhari-frappe,gangadhar-kadam/verve_live_frappe,gangadhar-kadam/lgnlvefrape,pranalik/frappe-bb,praba230890/frappe,saurabh6790/OFF-RISLIB,gangadhar-kadam/laganfrappe,gangadhar-kadam/helpdesk-frappe,ashokrajbathu/secondrep,indictranstech/tele-frappe,gangadharkadam/letzfrappe,erpletzerp/letzerpcore,gangadharkadam/frappecontribution,gangadhar-kadam/hrfrappe,rohitw1991/smarttailorfrappe,gangadharkadam/v6_frappe,yashodhank/frappe,bcornwellmott/frappe,aboganas/frappe,saurabh6790/omni-libs,pawaranand/phr_frappe,mhbu50/
frappe,elba7r/builder,tundebabzy/frappe,saurabh6790/omni-libs,rohitwaghchaure/frappe_smart,aboganas/frappe,elba7r/builder,gangadharkadam/tailorfrappe,saurabh6790/phr-frappe,mbauskar/Das_frappe,sbktechnology/sap_frappe,almeidapaulopt/frappe,rmehta/frappe,saurabh6790/medsyn-lib1,indictranstech/Das_frappe,indictranstech/phr-frappe,indictranstech/frappe-digitales,geo-poland/frappe,gangadharkadam/v4_frappe,gangadhar-kadam/lgnlvefrape,saurabh6790/medsyn-lib1,saurabh6790/medsyn-lib,Tejal011089/digitales_frappe,gangadharkadam/johnfrappe,pawaranand/phr-frappe,drukhil/frappe,praba230890/frappe,elba7r/builder,PriyaShitole/MedViger-lib,indautgrp/frappe,gangadhar-kadam/laganfrappe,saurabh6790/trufil_lib,saurabh6790/omnit-lib,gangadhar-kadam/nassimlib,indautgrp/frappe,suyashphadtare/sajil-final-frappe,drukhil/frappe,adityahase/frappe,reachalpineswift/frappe-bench,saurabh6790/med_lib_rels,sbkolate/sap_frappe_v6,anandpdoshi/frappe,yashodhank/frappe,gangadharkadam/v4_frappe,indictranstech/phr-frappe,mhbu50/frappe,BhupeshGupta/frappe,rohitwaghchaure/vestasi-frappe,gangadharkadam/office_frappe,rohitwaghchaure/vestasi-frappe,gangadhar-kadam/helpdesk-frappe,Amber-Creative/amber-frappe,tundebabzy/frappe,ashokrajbathu/secondrep,indictranstech/reciphergroup-frappe,pombredanne/frappe,indautgrp/frappe,nabinhait/frappe,BhupeshGupta/frappe,ESS-LLP/frappe,saguas/frappe,indictranstech/omnitech-frappe,gangadharkadam/saloon_frappe,geo-poland/frappe,gangadhar-kadam/verve_live_frappe,indictranstech/osmosis-frappe,chdecultot/frappe,maxtorete/frappe,vjFaLk/frappe,indictranstech/ebuy-now-frappe,mhbu50/frappe,yashodhank/frappe,ESS-LLP/frappe,gangadharkadam/frappecontribution,pranalik/frappe-bb,mbauskar/frappe,indictranstech/internal-frappe,indictranstech/frappe-digitales,saurabh6790/phr-frappe,mbauskar/omnitech-frappe,mbauskar/tele-frappe,saurabh6790/phr-frappe,saurabh6790/med_new_lib,praba230890/frappe,mbauskar/omnitech-demo-frappe,erpletzerp/letzerpcore,saurabh6790/test-frappe,saurabh6790/med_lib_test
,hatwar/buyback-frappe,adityahase/frappe,almeidapaulopt/frappe,gangadhar-kadam/lgnlvefrape,anandpdoshi/frappe,neilLasrado/frappe,gangadharkadam/saloon_frappe_install,cadencewatches/frappe,elba7r/builder,manassolanki/frappe,hernad/frappe,frappe/frappe,mbauskar/helpdesk-frappe,sbktechnology/trufil-frappe,saurabh6790/omnitech-libs,gangadhar-kadam/helpdesk-frappe,letzerp/framework,mbauskar/omnitech-frappe,rohitwaghchaure/frappe_smart,nerevu/frappe,elba7r/frameworking,hatwar/buyback-frappe,saurabh6790/test_final_med_lib,saurabh6790/ON-RISLIB,mbauskar/helpdesk-frappe,mbauskar/omnitech-demo-frappe,sbkolate/sap_frappe_v6,saurabh6790/trufil_lib,rmehta/frappe,saurabh6790/omn-lib,Tejal011089/medsyn2_lib,frappe/frappe,gangadhar-kadam/helpdesk-frappe,gangadhar-kadam/verve_live_frappe,indictranstech/frappe,pranalik/parjanalib,gangadharkadam/letzfrappe,chdecultot/frappe,ShashaQin/frappe,deveninfotech/deven-frappe,indictranstech/Das_frappe,gangadharkadam/vlinkfrappe,indictranstech/trufil-frappe,RicardoJohann/frappe,indictranstech/trufil-frappe,reachalpineswift/frappe-bench,rmehta/frappe,nerevu/frappe,gangadharkadam/v6_frappe,gangadharkadam/stfrappe,rohitw1991/smarttailorfrappe | webnotes/app.py | webnotes/app.py | import sys, os
import json
sys.path.insert(0, '.')
sys.path.insert(0, 'app')
sys.path.insert(0, 'lib')
from werkzeug.wrappers import Request, Response
from werkzeug.local import LocalManager
from webnotes.middlewares import StaticDataMiddleware
from werkzeug.exceptions import HTTPException
from werkzeug.contrib.profiler import ProfilerMiddleware
from webnotes import get_config
import mimetypes
import webnotes
import webnotes.handler
import webnotes.auth
import webnotes.webutils
local_manager = LocalManager([webnotes.local])
def handle_session_stopped():
res = Response("""<html>
<body style="background-color: #EEE;">
<h3 style="width: 900px; background-color: #FFF; border: 2px solid #AAA; padding: 20px; font-family: Arial; margin: 20px auto">
Updating.
We will be back in a few moments...
</h3>
</body>
</html>""")
res.status_code = 503
res.content_type = 'text/html'
return res
@Request.application
def application(request):
webnotes.local.request = request
try:
site = webnotes.utils.get_site_name(request.host)
webnotes.init(site=site)
webnotes.local.form_dict = webnotes._dict({ k:v[0] if isinstance(v, (list, tuple)) else v \
for k, v in (request.form or request.args).iteritems() })
webnotes.local._response = Response()
try:
webnotes.http_request = webnotes.auth.HTTPRequest()
except webnotes.AuthenticationError, e:
pass
if webnotes.form_dict.cmd:
webnotes.handler.handle()
else:
webnotes.webutils.render(webnotes.request.path[1:])
except HTTPException, e:
return e
except webnotes.SessionStopped, e:
webnotes.local._response = handle_session_stopped()
finally:
if webnotes.conn:
webnotes.conn.close()
return webnotes.local._response
application = local_manager.make_middleware(application)
def serve(port=8000, profile=False):
webnotes.validate_versions()
global application
from werkzeug.serving import run_simple
if profile:
application = ProfilerMiddleware(application)
application = StaticDataMiddleware(application, {
'/': 'public',
})
run_simple('0.0.0.0', int(port), application, use_reloader=True,
use_debugger=True, use_evalex=True)
| import sys, os
import json
sys.path.insert(0, '.')
sys.path.insert(0, 'app')
sys.path.insert(0, 'lib')
from werkzeug.wrappers import Request, Response
from werkzeug.local import LocalManager
from webnotes.middlewares import StaticDataMiddleware
from werkzeug.exceptions import HTTPException
from werkzeug.contrib.profiler import ProfilerMiddleware
from webnotes import get_config
import mimetypes
import webnotes
import webnotes.handler
import webnotes.auth
import webnotes.webutils
local_manager = LocalManager([webnotes.local])
@Request.application
def application(request):
webnotes.local.request = request
try:
site = webnotes.utils.get_site_name(request.host)
webnotes.init(site=site)
webnotes.local.form_dict = webnotes._dict({ k:v[0] if isinstance(v, (list, tuple)) else v \
for k, v in (request.form or request.args).iteritems() })
webnotes.local._response = Response()
try:
webnotes.http_request = webnotes.auth.HTTPRequest()
except webnotes.AuthenticationError, e:
pass
if webnotes.form_dict.cmd:
webnotes.handler.handle()
else:
webnotes.webutils.render(webnotes.request.path[1:])
except HTTPException, e:
return e
finally:
if webnotes.conn:
webnotes.conn.close()
return webnotes._response
application = local_manager.make_middleware(application)
def serve(port=8000, profile=False):
webnotes.validate_versions()
global application
from werkzeug.serving import run_simple
if profile:
application = ProfilerMiddleware(application)
application = StaticDataMiddleware(application, {
'/': 'public',
})
run_simple('0.0.0.0', int(port), application, use_reloader=True,
use_debugger=True, use_evalex=True)
| mit | Python |
e38fa3f55b0e60a1d6c7fa0cf194e6f3bd4b899d | add histogram util | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/util/datadog/gauges.py | corehq/util/datadog/gauges.py | from functools import wraps
from celery.task import periodic_task
from corehq.util.datadog import statsd, datadog_logger
from corehq.util.soft_assert import soft_assert
def datadog_gauge_task(name, fn, run_every, enforce_prefix='commcare'):
"""
helper for easily registering datadog gauges to run periodically
To update a datadog gauge on a schedule based on the result of a function
just add to your app's tasks.py:
my_calculation = datadog_gauge_task('my.datadog.metric', my_calculation_function,
run_every=crontab(minute=0))
"""
_enforce_prefix(name, enforce_prefix)
datadog_gauge = _DatadogGauge(name, fn, run_every)
return datadog_gauge.periodic_task()
def datadog_histogram(name, value, enforce_prefix='commcare', tags=None):
"""
Usage: Used to track the statistical distribution of a set of values over a statsd flush period.
Actually submits as multiple metrics:
"""
_datadog_record(statsd.histogram, name, value, enforce_prefix, tags)
def datadog_gauge(name, value, enforce_prefix='commcare', tags=None):
_datadog_record(statsd.gauge, name, value, enforce_prefix, tags)
def datadog_counter(name, value=1, enforce_prefix='commcare', tags=None):
_datadog_record(statsd.increment, name, value, enforce_prefix, tags)
def _datadog_record(fn, name, value, enforce_prefix='commcare', tags=None):
_enforce_prefix(name, enforce_prefix)
try:
fn(name, value, tags=tags)
except Exception:
datadog_logger.exception('Unable to record Datadog stats')
class _DatadogGauge(object):
def __init__(self, name, fn, run_every):
self.name = name
self.fn = fn
self.run_every = run_every
def periodic_task(self):
@periodic_task('background_queue', run_every=self.run_every,
acks_late=True, ignore_result=True)
@wraps(self.fn)
def inner(*args, **kwargs):
statsd.gauge(self.name, self.fn(*args, **kwargs))
return inner
def _enforce_prefix(name, prefix):
soft_assert(fail_if_debug=True).call(
not prefix or name.split('.')[0] == prefix,
"Did you mean to call your gauge 'commcare.{}'? "
"If you're sure you want to forgo the prefix, you can "
"pass enforce_prefix=None".format(name))
| from functools import wraps
from celery.task import periodic_task
from corehq.util.datadog import statsd, datadog_logger
from corehq.util.soft_assert import soft_assert
def datadog_gauge_task(name, fn, run_every, enforce_prefix='commcare'):
"""
helper for easily registering datadog gauges to run periodically
To update a datadog gauge on a schedule based on the result of a function
just add to your app's tasks.py:
my_calculation = datadog_gauge_task('my.datadog.metric', my_calculation_function,
run_every=crontab(minute=0))
"""
_enforce_prefix(name, enforce_prefix)
datadog_gauge = _DatadogGauge(name, fn, run_every)
return datadog_gauge.periodic_task()
def datadog_gauge(name, value, enforce_prefix='commcare', tags=None):
_datadog_record(statsd.gauge, name, value, enforce_prefix, tags)
def datadog_counter(name, value=1, enforce_prefix='commcare', tags=None):
_datadog_record(statsd.increment, name, value, enforce_prefix, tags)
def _datadog_record(fn, name, value, enforce_prefix='commcare', tags=None):
_enforce_prefix(name, enforce_prefix)
try:
fn(name, value, tags=tags)
except Exception:
datadog_logger.exception('Unable to record Datadog stats')
class _DatadogGauge(object):
def __init__(self, name, fn, run_every):
self.name = name
self.fn = fn
self.run_every = run_every
def periodic_task(self):
@periodic_task('background_queue', run_every=self.run_every,
acks_late=True, ignore_result=True)
@wraps(self.fn)
def inner(*args, **kwargs):
statsd.gauge(self.name, self.fn(*args, **kwargs))
return inner
def _enforce_prefix(name, prefix):
soft_assert(fail_if_debug=True).call(
not prefix or name.split('.')[0] == prefix,
"Did you mean to call your gauge 'commcare.{}'? "
"If you're sure you want to forgo the prefix, you can "
"pass enforce_prefix=None".format(name))
| bsd-3-clause | Python |
3643f0ce1b7ea7982e8081ae29e726c73471cc4b | update description | tony/vcspull,tony/vcspull | vcspull/__about__.py | vcspull/__about__.py | __title__ = 'vcspull'
__package_name__ = 'vcspull'
__description__ = 'synchronize your repos'
__version__ = '1.0.0'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
| __title__ = 'vcspull'
__package_name__ = 'vcspull'
__description__ = 'vcs project manager'
__version__ = '1.0.0'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
| mit | Python |
42561d709a2ecfee71103dfbb55116cec1128b71 | fix redirect after upload | ecaldwe1/zika,vecnet/zika,ecaldwe1/zika,vecnet/zika,ecaldwe1/zika,ecaldwe1/zika,ecaldwe1/zika,vecnet/zika,vecnet/zika,vecnet/zika | website/apps/home/views/UploadView.py | website/apps/home/views/UploadView.py | #!/bin/env python2
# -*- coding: utf-8 -*-
#
# This file is part of the VecNet Zika modeling interface.
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/vecnet/zika
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http.response import HttpResponseBadRequest, HttpResponseRedirect
from django.views.generic.base import TemplateView
from website.apps.home.utils import load_simulation_file
logger = logging.getLogger(__name__)
class UploadView(TemplateView):
template_name = "../templates/simulation/upload.html"
@transaction.atomic
def post(self, request, *args, **kwargs):
if request.method == 'POST':
if not request.FILES['output_file']:
return HttpResponseBadRequest("No 'output_file' is provided")
else:
sim_name = self.request.POST.get(u"name", None)
is_historical = self.request.POST.get("historical")
load_simulation_file(request.FILES['output_file'], simulation_name=sim_name, is_historical=is_historical)
# Redirect to appropriate page whether uploading simulation or historical
if is_historical!='on':
return HttpResponseRedirect(reverse('home.display_simulations'))
else:
return HttpResponseRedirect(reverse('home.display_historical'))
else:
return HttpResponseRedirect("")
| #!/bin/env python2
# -*- coding: utf-8 -*-
#
# This file is part of the VecNet Zika modeling interface.
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/vecnet/zika
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http.response import HttpResponseBadRequest, HttpResponseRedirect
from django.views.generic.base import TemplateView
from website.apps.home.utils import load_simulation_file
logger = logging.getLogger(__name__)
class UploadView(TemplateView):
template_name = "../templates/simulation/upload.html"
@transaction.atomic
def post(self, request, *args, **kwargs):
if request.method == 'POST':
if not request.FILES['output_file']:
return HttpResponseBadRequest("No 'output_file' is provided")
else:
sim_name = self.request.POST.get(u"name", None)
is_historical = self.request.POST.get("historical")
load_simulation_file(request.FILES['output_file'], simulation_name=sim_name, is_historical=is_historical)
return HttpResponseRedirect(reverse('home.display_simulations'))
else:
return HttpResponseRedirect("")
| mpl-2.0 | Python |
c9a915692b30458717ead2f83fce77ce295e5ed9 | add recipe_folder member (#10527) | conan-io/conan,conan-io/conan,conan-io/conan | conans/pylint_plugin.py | conans/pylint_plugin.py | """Pylint plugin for ConanFile"""
import astroid
from astroid import MANAGER
def register(linter):
"""Declare package as plugin
This function needs to be declared so astroid treats
current file as a plugin.
"""
pass
def transform_conanfile(node):
"""Transform definition of ConanFile class so dynamic fields are visible to pylint"""
str_class = astroid.builtin_lookup("str")
info_class = MANAGER.ast_from_module_name("conans.model.info").lookup(
"ConanInfo")
build_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.graph_manager").lookup("_RecipeBuildRequires")
file_copier_class = MANAGER.ast_from_module_name(
"conans.client.file_copier").lookup("FileCopier")
file_importer_class = MANAGER.ast_from_module_name(
"conans.client.importer").lookup("_FileImporter")
python_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.python_requires").lookup("PyRequires")
dynamic_fields = {
"conan_data": str_class,
"build_requires": build_requires_class,
"info_build": info_class,
"info": info_class,
"copy": file_copier_class,
"copy_deps": file_importer_class,
"python_requires": [str_class, python_requires_class],
"recipe_folder": str_class,
}
for f, t in dynamic_fields.items():
node.locals[f] = [t]
MANAGER.register_transform(
astroid.ClassDef, transform_conanfile,
lambda node: node.qname() == "conans.model.conan_file.ConanFile")
def _python_requires_member():
return astroid.parse("""
from conans.client.graph.python_requires import ConanPythonRequire
python_requires = ConanPythonRequire()
""")
astroid.register_module_extender(astroid.MANAGER, "conans", _python_requires_member)
| """Pylint plugin for ConanFile"""
import astroid
from astroid import MANAGER
def register(linter):
"""Declare package as plugin
This function needs to be declared so astroid treats
current file as a plugin.
"""
pass
def transform_conanfile(node):
"""Transform definition of ConanFile class so dynamic fields are visible to pylint"""
str_class = astroid.builtin_lookup("str")
info_class = MANAGER.ast_from_module_name("conans.model.info").lookup(
"ConanInfo")
build_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.graph_manager").lookup("_RecipeBuildRequires")
file_copier_class = MANAGER.ast_from_module_name(
"conans.client.file_copier").lookup("FileCopier")
file_importer_class = MANAGER.ast_from_module_name(
"conans.client.importer").lookup("_FileImporter")
python_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.python_requires").lookup("PyRequires")
dynamic_fields = {
"conan_data": str_class,
"build_requires": build_requires_class,
"info_build": info_class,
"info": info_class,
"copy": file_copier_class,
"copy_deps": file_importer_class,
"python_requires": [str_class, python_requires_class],
}
for f, t in dynamic_fields.items():
node.locals[f] = [t]
MANAGER.register_transform(
astroid.ClassDef, transform_conanfile,
lambda node: node.qname() == "conans.model.conan_file.ConanFile")
def _python_requires_member():
return astroid.parse("""
from conans.client.graph.python_requires import ConanPythonRequire
python_requires = ConanPythonRequire()
""")
astroid.register_module_extender(astroid.MANAGER, "conans", _python_requires_member)
| mit | Python |
4b5ae262bab0bc0c83555d39400049f20aaca9cd | Add CONVERSATION_LABEL_MAX_LENGTH constant | vkosuri/ChatterBot,gunthercox/ChatterBot | chatterbot/constants.py | chatterbot/constants.py | """
ChatterBot constants
"""
'''
The maximum length of characters that the text of a statement can contain.
This should be enforced on a per-model basis by the data model for each
storage adapter.
'''
STATEMENT_TEXT_MAX_LENGTH = 400
'''
The maximum length of characters that the text label of a conversation can contain.
The number 32 was chosen because that is the length of the string representation
of a UUID4 with no hyphens.
'''
CONVERSATION_LABEL_MAX_LENGTH = 32
# The maximum length of characters that the name of a tag can contain
TAG_NAME_MAX_LENGTH = 50
DEFAULT_DJANGO_APP_NAME = 'django_chatterbot'
| """
ChatterBot constants
"""
'''
The maximum length of characters that the text of a statement can contain.
This should be enforced on a per-model basis by the data model for each
storage adapter.
'''
STATEMENT_TEXT_MAX_LENGTH = 400
# The maximum length of characters that the name of a tag can contain
TAG_NAME_MAX_LENGTH = 50
DEFAULT_DJANGO_APP_NAME = 'django_chatterbot'
| bsd-3-clause | Python |
7a1e57fa5c6d2c6330a73e8fab95c5ef6fa0ea35 | Fix indentation | thewtex/tomviz,cjh1/tomviz,cjh1/tomviz,mathturtle/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,cryos/tomviz,cryos/tomviz,thewtex/tomviz,OpenChemistry/tomviz,cryos/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,cjh1/tomviz,OpenChemistry/tomviz,thewtex/tomviz | tomviz/python/SetNegativeVoxelsToZero.py | tomviz/python/SetNegativeVoxelsToZero.py | def transform_scalars(dataset):
"""Set negative voxels to zero"""
from tomviz import utils
import numpy as np
data = utils.get_array(dataset)
data[data<0] = 0 #set negative voxels to zero
# set the result as the new scalars.
utils.set_array(dataset, data) | def transform_scalars(dataset):
"""Set negative voxels to zero"""
from tomviz import utils
import numpy as np
data = utils.get_array(dataset)
data[data<0] = 0 #set negative voxels to zero
# set the result as the new scalars.
utils.set_array(dataset, data)
| bsd-3-clause | Python |
46e2997cb51e45dc58f5a97cea6642ba64d03188 | Fix 9.0 version | SerpentCS/purchase-workflow,SerpentCS/purchase-workflow,Eficent/purchase-workflow,SerpentCS/purchase-workflow,Eficent/purchase-workflow | purchase_all_shipments/__openerp__.py | purchase_all_shipments/__openerp__.py | # Author: Leonardo Pistone
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{'name': 'Purchase All Shipments',
'version': '9.0.1.0.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Purchases',
'license': 'AGPL-3',
'depends': ['purchase'],
'data': ['view/purchase_order.xml'],
}
| # Author: Leonardo Pistone
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{'name': 'Purchase All Shipments',
'version': '8.0.1.0.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Purchases',
'license': 'AGPL-3',
'depends': ['purchase'],
'data': ['view/purchase_order.xml'],
}
| agpl-3.0 | Python |
5aca45a68a229f43a25dd97d2c680716c9baabf5 | add travis env to sgen | tz70s/pro-sy-kuo,tz70s/pro-sy-kuo,tz70s/pro-sy-kuo,tz70s/pro-sy-kuo | scripts/sgen.py | scripts/sgen.py | #!/usr/bin/python
# Generate original static file to another with new prefix
# ./sgen index.html old_prefix static_index.html new_prefix
import sys
from os import walk, path, environ
# File lists
# The two file lists should be aligned.
root = environ['TRAVIS_BUILD_DIR']
files = []
for (dirpath, dirname, filenames) in walk( root + "/static"):
for f in filenames:
if ".html" in f:
files.append(dirpath + "/" + f)
# prefix of target files
target_prefix = root + "/docs"
target_files = []
for f in files:
target_files.append(f.replace( root + "/static", target_prefix))
print(target_files)
# Variables of parsing
def parse_args():
if len(sys.argv) < 3:
print ("Not enough arguments")
exit(1)
original_prefix = sys.argv[1]
new_prefix = sys.argv[2]
# unsafe checkout prefix
if original_prefix[0] != 'h' or original_prefix[-1] != '/' or new_prefix[0] != 'h' or new_prefix[-1] != '/':
print ("Seems something wrong on the prefix")
exit(1)
return original_prefix, new_prefix
def sgen():
original_prefix, new_prefix = parse_args()
# parse the publications_ref into the appropriate html format
for i in range(len(files)):
with open(files[i]) as f:
content = f.read()
new_content = content.replace(original_prefix, new_prefix)
with open(target_files[i], "w+") as f:
f.write(new_content)
sgen()
| #!/usr/bin/python
# Generate original static file to another with new prefix
# ./sgen index.html old_prefix static_index.html new_prefix
import sys
from os import walk, path
# File lists
# The two file lists should be aligned.
files = []
for (dirpath, dirname, filenames) in walk("../static"):
for f in filenames:
if ".html" in f:
files.append(dirpath + "/" + f)
# prefix of target files
target_prefix = "../docs"
target_files = []
for f in files:
target_files.append(f.replace("../static", target_prefix))
print(target_files)
# Variables of parsing
def parse_args():
if len(sys.argv) < 3:
print ("Not enough arguments")
exit(1)
original_prefix = sys.argv[1]
new_prefix = sys.argv[2]
# unsafe checkout prefix
if original_prefix[0] != 'h' or original_prefix[-1] != '/' or new_prefix[0] != 'h' or new_prefix[-1] != '/':
print ("Seems something wrong on the prefix")
exit(1)
return original_prefix, new_prefix
def sgen():
original_prefix, new_prefix = parse_args()
# parse the publications_ref into the appropriate html format
for i in range(len(files)):
with open(files[i]) as f:
content = f.read()
new_content = content.replace(original_prefix, new_prefix)
with open(target_files[i], "w+") as f:
f.write(new_content)
sgen() | mit | Python |
24cebbd351875103067162733cf682320df29cf6 | Update VMfileconvert_V2.py | jcornford/pyecog | pyecog/light_code/VMfileconvert_V2.py | pyecog/light_code/VMfileconvert_V2.py | import glob, os, numpy, sys
try:
import stfio
except:
sys.path.append('C:\Python27\Lib\site-packages')
import stfio
def main():
searchpath = os.getcwd()
exportdirectory = searchpath+'/ConvertedFiles/'
# Make export directory
if not os.path.exists(exportdirectory):
os.makedirs(exportdirectory)
# Walk through and find abf files
pattern = '*.abf'
datafilenames = glob.glob(pattern)
if datafilenames:
for filename in datafilenames:
print ('Converting '+str(filename))
data = stfio.read(filename,ftype = "abf")
x = data.aspandas()
x = x.values
numpy.save(exportdirectory+filename[0:-4],x)
if __name__ == '__main__':
main()
| import glob, os, numpy
import stfio
def main():
searchpath = os.getcwd()
exportdirectory = searchpath+'/ConvertedFiles/'
# Make export directory
if not os.path.exists(exportdirectory):
os.makedirs(exportdirectory)
# Walk through and find abf files
pattern = '*.abf'
datafilenames = glob.glob(pattern)
if datafilenames:
for filename in datafilenames:
print ('Converting '+str(filename))
data = stfio.read(filename,ftype = "abf")
x = data.aspandas()
x = x.values
numpy.save(exportdirectory+filename[0:-4],x)
if __name__ == '__main__':
main()
| mit | Python |
393bde7e7f3902f734e8c01f265b216f2d3eef26 | remove leftover | DUlSine/DUlSine,DUlSine/DUlSine | models/dulsine_commons.py | models/dulsine_commons.py | # -*- coding: utf-8 -*-
# vim: set ts=4
# Common enumerations used in some places
CIVILITES = (
('M.', 'Monsieur'),
('Mme', 'Madame'),
('Mlle', 'Mademoiselle')
)
CIRCUITS = (
('O', 'ouvert'),
('F', 'ferme'),
('N', 'pas de circuit')
)
TYPES_ACTEURS = (
('P', 'Professionnels'),
('A', 'Amateurs'),
('M', 'Mixte')
)
TEAM_TYPES = (
(0, 'PAPS'),
(1, 'Equipe'),
(2, 'Binome'),
(3, 'Equipe d\'Evacuation')
)
DIPLOME_CI = 0
DIPLOME_PSE2 = 1
DIPLOME_PSE1 = 2
DIPLOME_PSC1 = 3
DIPLOME_SECOURS = (
(DIPLOME_CI, 'CI'),
(DIPLOME_PSE2, 'PSE2'),
(DIPLOME_PSE1, 'PSE1'),
(DIPLOME_PSC1, 'PSC1'),
(4, 'IPS'),
(5, 'CDPE')
)
DIPLOME_CONDUCTEURS = (
(10, 'CH'),
(11, 'CHA'),
(12, '4x4')
)
DIPLOME_FORMATEURS = (
(20, 'FCI'),
(21, 'PAE1'),
(22, 'PAE2'),
(23, 'PAE3'),
(24, 'PAE4'),
)
FORMATIONS = DIPLOME_SECOURS + DIPLOME_CONDUCTEURS + DIPLOME_FORMATEURS
WISH_ND = 0
WISH_CHOICES = (
(WISH_ND, 'N.D.'),
(1, 'Disponible'),
(2, 'Intéressé'),
(3, 'Très intéressé'),
)
| # -*- coding: utf-8 -*-
# vim: set ts=4
# Common enumerations used in some places
CIVILITES = (
('M.', 'Monsieur'),
('Mme', 'Madame'),
('Mlle', 'Mademoiselle')
)
CIRCUITS = (
('O', 'ouvert'),
('F', 'ferme'),
('N', 'pas de circuit')
)
TYPES_ACTEURS = (
('P', 'Professionnels'),
('A', 'Amateurs'),
('M', 'Mixte')
)
TEAM_TYPES = (
(0, 'PAPS'),
(1, 'Equipe'),
(2, 'Binome'),
(3, 'Equipe d\'Evacuation')
)
DIPLOME_SECOURS = (
(0, 'N.D.'),
(1, 'CI'),
(2, 'PSE2'),
(3, 'PSE1'),
(4, 'PSC1'),
(5, 'IPS'),
(6, 'CDPE')
)
NOT_AVAILABLE = 0
DIPLOME_CI = 1
DIPLOME_PSE2 = 2
DIPLOME_PSE1 = 3
DIPLOME_PSC1 = 4
DIPLOME_CONDUCTEURS = (
(10, 'CH'),
(11, 'CHA'),
(12, '4x4')
)
DIPLOME_FORMATEURS = (
(20, 'FCI'),
(21, 'PAE1'),
(22, 'PAE2'),
(23, 'PAE3'),
(24, 'PAE4'),
)
FORMATIONS = DIPLOME_SECOURS + DIPLOME_CONDUCTEURS + DIPLOME_FORMATEURS
WISH_ND = 0
WISH_CHOICES = (
(WISH_ND, 'N.D.'),
(1, 'Disponible'),
(2, 'Intéressé'),
(3, 'Très intéressé'),
)
| agpl-3.0 | Python |
f9a99102a7053e444021926d08750f04a662fd9f | remove unnecessary print statements | jmfranck/pyspecdata,jmfranck/pyspecdata,jmfranck/pyspecdata,jmfranck/pyspecdata | pyspecdata/load_files/open_subpath.py | pyspecdata/load_files/open_subpath.py | from ..core import *
from ..datadir import dirformat
import os.path
from zipfile import ZipFile
def open_subpath(file_reference,*subpath,**kwargs):
"""
Parameters
----------
file_reference: str or tuple
If a string, then it's the name of a directory.
If it's a tuple, then, it has three elements: the ZipFile object, the
filename of the zip file (for reference), and the name of the file we're interested in
within the zip file.
test_only: bool
just test if the path exists
"""
mode,test_only = process_kwargs([('mode','r'),
('test_only',False)],kwargs)
if isinstance(file_reference,basestring):
if test_only:
full_path = os.path.join(file_reference, *subpath)
if os.path.exists(full_path):
return True
else:
return False
else:
fp = open(os.path.join(file_reference,*subpath),mode)
else:
if type(file_reference) == tuple:
if len(file_reference) == 3 and type(file_reference[0]) is ZipFile:
zf = file_reference[0]
zip_basename = file_reference[1]
name_inside_zip = file_reference[2]
subfile = '/'.join((name_inside_zip,)+subpath)
if test_only:
if subfile in zf.namelist():
return True
else:
return False
if subfile in zf.namelist():
return zf.open(subfile)
else:
raise ValueError(subfile+" not found in zip file")
else:
raise ValueError("open_subpath doesn't understand the format of the tuple passe to file_reference")
else:
raise ValueError("open_subpath doesn't understand the type of the file_reference")
return fp
| from ..core import *
from ..datadir import dirformat
import os.path
from zipfile import ZipFile
def open_subpath(file_reference,*subpath,**kwargs):
"""
Parameters
----------
file_reference: str or tuple
If a string, then it's the name of a directory.
If it's a tuple, then, it has three elements: the ZipFile object, the
filename of the zip file (for reference), and the name of the file we're interested in
within the zip file.
test_only: bool
just test if the path exists
"""
mode,test_only = process_kwargs([('mode','r'),
('test_only',False)],kwargs)
if isinstance(file_reference,basestring):
if test_only:
print "testing",(file_reference,) + subpath
full_path = os.path.join(file_reference, *subpath)
if os.path.exists(full_path):
return True
else:
return False
else:
fp = open(os.path.join(file_reference,*subpath),mode)
else:
if type(file_reference) == tuple:
if len(file_reference) == 3 and type(file_reference[0]) is ZipFile:
zf = file_reference[0]
zip_basename = file_reference[1]
name_inside_zip = file_reference[2]
subfile = '/'.join((name_inside_zip,)+subpath)
if test_only:
if subfile in zf.namelist():
return True
else:
return False
if subfile in zf.namelist():
return zf.open(subfile)
else:
raise ValueError(subfile+" not found in zip file")
else:
raise ValueError("open_subpath doesn't understand the format of the tuple passe to file_reference")
else:
raise ValueError("open_subpath doesn't understand the type of the file_reference")
return fp
| bsd-3-clause | Python |
ba370231fe80280dec806c7c2515061e8607b360 | Add SCA into mbio | wzmao/mbio,wzmao/mbio,wzmao/mbio | Correlation/__init__.py | Correlation/__init__.py | __author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
from os import path
Clist = ['mi.c', 'omes.c']
for c in Clist:
if not path.exists(_path__+'/'+c.replace('.c', '_c.so')):
from mbio import _make
_make(_path__+'/'+c)
_Startup()
from . import MI
from .MI import *
__all__.extend(MI.__all__)
from . import OMES
from .OMES import *
__all__.extend(OMES.__all__)
from . import SCA
from .SCA import *
__all__.extend(SCA.__all__) | __author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
from os import path
Clist = ['mi.c', 'omes.c']
for c in Clist:
if not path.exists(_path__+'/'+c.replace('.c', '_c.so')):
from mbio import _make
_make(_path__+'/'+c)
_Startup()
from . import MI
from .MI import *
__all__.extend(MI.__all__)
from . import OMES
from .OMES import *
__all__.extend(OMES.__all__)
| mit | Python |
2277c82efdc456e5873987eabac88810b2cece5b | Fix pep8 whitespace violation. | chauhanhardik/populo,openfun/edx-platform,gsehub/edx-platform,Edraak/circleci-edx-platform,chauhanhardik/populo_2,simbs/edx-platform,nikolas/edx-platform,mcgachey/edx-platform,rue89-tech/edx-platform,AkA84/edx-platform,synergeticsedx/deployment-wipro,4eek/edx-platform,LICEF/edx-platform,TeachAtTUM/edx-platform,JCBarahona/edX,MSOpenTech/edx-platform,eduNEXT/edx-platform,praveen-pal/edx-platform,kursitet/edx-platform,chudaol/edx-platform,zubair-arbi/edx-platform,chudaol/edx-platform,edry/edx-platform,martynovp/edx-platform,chand3040/cloud_that,nttks/jenkins-test,alexthered/kienhoc-platform,abdoosh00/edx-rtl-final,chauhanhardik/populo,doganov/edx-platform,abdoosh00/edraak,peterm-itr/edx-platform,doismellburning/edx-platform,polimediaupv/edx-platform,don-github/edx-platform,iivic/BoiseStateX,simbs/edx-platform,hamzehd/edx-platform,ferabra/edx-platform,wwj718/edx-platform,CredoReference/edx-platform,unicri/edx-platform,eestay/edx-platform,Unow/edx-platform,sameetb-cuelogic/edx-platform-test,bitifirefly/edx-platform,TsinghuaX/edx-platform,RPI-OPENEDX/edx-platform,carsongee/edx-platform,pdehaye/theming-edx-platform,unicri/edx-platform,beacloudgenius/edx-platform,lduarte1991/edx-platform,IONISx/edx-platform,Softmotions/edx-platform,shashank971/edx-platform,atsolakid/edx-platform,proversity-org/edx-platform,PepperPD/edx-pepper-platform,nttks/edx-platform,CourseTalk/edx-platform,nanolearning/edx-platform,raccoongang/edx-platform,utecuy/edx-platform,defance/edx-platform,edx-solutions/edx-platform,cyanna/edx-platform,dcosentino/edx-platform,abdoosh00/edraak,kxliugang/edx-platform,sameetb-cuelogic/edx-platform-test,chauhanhardik/populo_2,rhndg/openedx,BehavioralInsightsTeam/edx-platform,shabab12/edx-platform,xinjiguaike/edx-platform,deepsrijit1105/edx-platform,ak2703/edx-platform,unicri/edx-platform,WatanabeYasumasa/edx-platform,zofuthan/edx-platform,eemirtekin/edx-platform,wwj718/edx-platform,shubhdev/e
dxOnBaadal,vasyarv/edx-platform,kamalx/edx-platform,zubair-arbi/edx-platform,wwj718/edx-platform,morpheby/levelup-by,nanolearningllc/edx-platform-cypress-2,fly19890211/edx-platform,EduPepperPDTesting/pepper2013-testing,mitocw/edx-platform,itsjeyd/edx-platform,BehavioralInsightsTeam/edx-platform,romain-li/edx-platform,etzhou/edx-platform,shabab12/edx-platform,hkawasaki/kawasaki-aio8-1,xuxiao19910803/edx-platform,kxliugang/edx-platform,hamzehd/edx-platform,msegado/edx-platform,synergeticsedx/deployment-wipro,auferack08/edx-platform,openfun/edx-platform,olexiim/edx-platform,gsehub/edx-platform,deepsrijit1105/edx-platform,MakeHer/edx-platform,don-github/edx-platform,analyseuc3m/ANALYSE-v1,nttks/edx-platform,MakeHer/edx-platform,shubhdev/openedx,analyseuc3m/ANALYSE-v1,xuxiao19910803/edx-platform,prarthitm/edxplatform,deepsrijit1105/edx-platform,edry/edx-platform,benpatterson/edx-platform,gymnasium/edx-platform,doganov/edx-platform,motion2015/a3,jamiefolsom/edx-platform,J861449197/edx-platform,adoosii/edx-platform,gymnasium/edx-platform,miptliot/edx-platform,cselis86/edx-platform,chand3040/cloud_that,carsongee/edx-platform,rationalAgent/edx-platform-custom,zofuthan/edx-platform,appliedx/edx-platform,EDUlib/edx-platform,ovnicraft/edx-platform,mjirayu/sit_academy,WatanabeYasumasa/edx-platform,caesar2164/edx-platform,jamesblunt/edx-platform,rismalrv/edx-platform,stvstnfrd/edx-platform,IITBinterns13/edx-platform-dev,cyanna/edx-platform,jswope00/griffinx,Lektorium-LLC/edx-platform,playm2mboy/edx-platform,prarthitm/edxplatform,EduPepperPDTesting/pepper2013-testing,amir-qayyum-khan/edx-platform,mushtaqak/edx-platform,don-github/edx-platform,iivic/BoiseStateX,Endika/edx-platform,unicri/edx-platform,beni55/edx-platform,prarthitm/edxplatform,itsjeyd/edx-platform,jelugbo/tundex,mtlchun/edx,motion2015/edx-platform,olexiim/edx-platform,mcgachey/edx-platform,gsehub/edx-platform,dsajkl/123,zadgroup/edx-platform,jazkarta/edx-platform,ZLLab-Mooc/edx-platform,vikas1885/test1,rationalAgent/
edx-platform-custom,ESOedX/edx-platform,syjeon/new_edx,mjirayu/sit_academy,praveen-pal/edx-platform,jamesblunt/edx-platform,rhndg/openedx,ampax/edx-platform,nagyistoce/edx-platform,nagyistoce/edx-platform,jolyonb/edx-platform,IONISx/edx-platform,ubc/edx-platform,appsembler/edx-platform,adoosii/edx-platform,kmoocdev2/edx-platform,nikolas/edx-platform,atsolakid/edx-platform,hkawasaki/kawasaki-aio8-2,wwj718/edx-platform,msegado/edx-platform,alu042/edx-platform,ampax/edx-platform,amir-qayyum-khan/edx-platform,ak2703/edx-platform,sameetb-cuelogic/edx-platform-test,ampax/edx-platform-backup,xuxiao19910803/edx,utecuy/edx-platform,bitifirefly/edx-platform,jswope00/griffinx,eduNEXT/edx-platform,olexiim/edx-platform,cpennington/edx-platform,ovnicraft/edx-platform,shubhdev/edxOnBaadal,naresh21/synergetics-edx-platform,procangroup/edx-platform,Kalyzee/edx-platform,ESOedX/edx-platform,jelugbo/tundex,lduarte1991/edx-platform,Edraak/circleci-edx-platform,jazkarta/edx-platform,jazztpt/edx-platform,EduPepperPD/pepper2013,CourseTalk/edx-platform,J861449197/edx-platform,IndonesiaX/edx-platform,caesar2164/edx-platform,zhenzhai/edx-platform,nanolearningllc/edx-platform-cypress,franosincic/edx-platform,dkarakats/edx-platform,mitocw/edx-platform,inares/edx-platform,jelugbo/tundex,louyihua/edx-platform,raccoongang/edx-platform,DefyVentures/edx-platform,Shrhawk/edx-platform,knehez/edx-platform,miptliot/edx-platform,tanmaykm/edx-platform,apigee/edx-platform,motion2015/a3,yokose-ks/edx-platform,Softmotions/edx-platform,nttks/jenkins-test,dkarakats/edx-platform,knehez/edx-platform,ZLLab-Mooc/edx-platform,CredoReference/edx-platform,don-github/edx-platform,marcore/edx-platform,nagyistoce/edx-platform,pelikanchik/edx-platform,doismellburning/edx-platform,arifsetiawan/edx-platform,pelikanchik/edx-platform,mbareta/edx-platform-ft,mitocw/edx-platform,mahendra-r/edx-platform,alexthered/kienhoc-platform,Unow/edx-platform,vismartltd/edx-platform,devs1991/test_edx_docmode,pomegranited/edx-platform,pabl
oborrego93/edx-platform,jazkarta/edx-platform,UOMx/edx-platform,jbzdak/edx-platform,IONISx/edx-platform,chand3040/cloud_that,zubair-arbi/edx-platform,leansoft/edx-platform,analyseuc3m/ANALYSE-v1,mjirayu/sit_academy,edx/edx-platform,10clouds/edx-platform,inares/edx-platform,vasyarv/edx-platform,arifsetiawan/edx-platform,leansoft/edx-platform,kmoocdev/edx-platform,UOMx/edx-platform,pepeportela/edx-platform,edx/edx-platform,olexiim/edx-platform,kalebhartje/schoolboost,ampax/edx-platform-backup,jamiefolsom/edx-platform,analyseuc3m/ANALYSE-v1,lduarte1991/edx-platform,10clouds/edx-platform,antoviaque/edx-platform,y12uc231/edx-platform,shubhdev/edx-platform,alu042/edx-platform,chauhanhardik/populo,JCBarahona/edX,beni55/edx-platform,raccoongang/edx-platform,mushtaqak/edx-platform,MSOpenTech/edx-platform,defance/edx-platform,procangroup/edx-platform,zhenzhai/edx-platform,jelugbo/tundex,morpheby/levelup-by,dcosentino/edx-platform,syjeon/new_edx,tiagochiavericosta/edx-platform,bdero/edx-platform,SivilTaram/edx-platform,Livit/Livit.Learn.EdX,defance/edx-platform,msegado/edx-platform,pdehaye/theming-edx-platform,rismalrv/edx-platform,kmoocdev/edx-platform,hastexo/edx-platform,mushtaqak/edx-platform,abdoosh00/edraak,romain-li/edx-platform,vasyarv/edx-platform,sudheerchintala/LearnEraPlatForm,Edraak/circleci-edx-platform,a-parhom/edx-platform,y12uc231/edx-platform,ahmadio/edx-platform,Endika/edx-platform,zerobatu/edx-platform,franosincic/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,jswope00/GAI,synergeticsedx/deployment-wipro,UXE/local-edx,edx-solutions/edx-platform,torchingloom/edx-platform,bigdatauniversity/edx-platform,morenopc/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,jswope00/GAI,antonve/s4-project-mooc,carsongee/edx-platform,DefyVentures/edx-platform,ahmedaljazzar/edx-platform,antonve/s4-project-mooc,vikas1885/test1,ferabra/edx-platform,Kalyzee/edx-platform,angelapper/edx-platform,ovnicraft/edx-platform,B-MOOC/edx-platform,romain-li/edx-pl
atform,xingyepei/edx-platform,CredoReference/edx-platform,Endika/edx-platform,apigee/edx-platform,CourseTalk/edx-platform,tanmaykm/edx-platform,TeachAtTUM/edx-platform,edry/edx-platform,xuxiao19910803/edx,TsinghuaX/edx-platform,xuxiao19910803/edx,MSOpenTech/edx-platform,mushtaqak/edx-platform,stvstnfrd/edx-platform,IndonesiaX/edx-platform,AkA84/edx-platform,prarthitm/edxplatform,hkawasaki/kawasaki-aio8-1,praveen-pal/edx-platform,philanthropy-u/edx-platform,cselis86/edx-platform,miptliot/edx-platform,JCBarahona/edX,vikas1885/test1,beni55/edx-platform,EduPepperPDTesting/pepper2013-testing,cognitiveclass/edx-platform,knehez/edx-platform,ahmadiga/min_edx,eduNEXT/edx-platform,edx-solutions/edx-platform,eemirtekin/edx-platform,Edraak/edraak-platform,jazkarta/edx-platform-for-isc,teltek/edx-platform,ahmadio/edx-platform,chauhanhardik/populo_2,adoosii/edx-platform,pku9104038/edx-platform,etzhou/edx-platform,ahmadio/edx-platform,SravanthiSinha/edx-platform,eduNEXT/edunext-platform,pelikanchik/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,jjmiranda/edx-platform,cpennington/edx-platform,dkarakats/edx-platform,doismellburning/edx-platform,OmarIthawi/edx-platform,chudaol/edx-platform,Softmotions/edx-platform,jjmiranda/edx-platform,solashirai/edx-platform,proversity-org/edx-platform,vasyarv/edx-platform,morpheby/levelup-by,hamzehd/edx-platform,valtech-mooc/edx-platform,syjeon/new_edx,CredoReference/edx-platform,nikolas/edx-platform,cecep-edu/edx-platform,fly19890211/edx-platform,jelugbo/tundex,sameetb-cuelogic/edx-platform-test,franosincic/edx-platform,proversity-org/edx-platform,shurihell/testasia,TsinghuaX/edx-platform,sudheerchintala/LearnEraPlatForm,Kalyzee/edx-platform,cognitiveclass/edx-platform,ahmedaljazzar/edx-platform,arbrandes/edx-platform,JioEducation/edx-platform,bdero/edx-platform,alexthered/kienhoc-platform,DNFcode/edx-platform,ahmadiga/min_edx,bigdatauniversity/edx-platform,pdehaye/theming-edx-platform,jamiefolsom/edx-platform,CourseTalk/edx-platform,o
vnicraft/edx-platform,devs1991/test_edx_docmode,amir-qayyum-khan/edx-platform,EduPepperPDTesting/pepper2013-testing,rhndg/openedx,LICEF/edx-platform,mjirayu/sit_academy,philanthropy-u/edx-platform,jazkarta/edx-platform-for-isc,LICEF/edx-platform,tiagochiavericosta/edx-platform,mtlchun/edx,Semi-global/edx-platform,nttks/jenkins-test,jswope00/griffinx,IndonesiaX/edx-platform,simbs/edx-platform,hkawasaki/kawasaki-aio8-0,shubhdev/openedx,philanthropy-u/edx-platform,rationalAgent/edx-platform-custom,andyzsf/edx,eestay/edx-platform,zhenzhai/edx-platform,Edraak/edx-platform,beacloudgenius/edx-platform,halvertoluke/edx-platform,AkA84/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,syjeon/new_edx,jonathan-beard/edx-platform,longmen21/edx-platform,jolyonb/edx-platform,shubhdev/edxOnBaadal,apigee/edx-platform,dsajkl/123,jbassen/edx-platform,procangroup/edx-platform,xinjiguaike/edx-platform,inares/edx-platform,xuxiao19910803/edx-platform,peterm-itr/edx-platform,eestay/edx-platform,Semi-global/edx-platform,chauhanhardik/populo_2,mahendra-r/edx-platform,atsolakid/edx-platform,ZLLab-Mooc/edx-platform,jruiperezv/ANALYSE,TeachAtTUM/edx-platform,xinjiguaike/edx-platform,naresh21/synergetics-edx-platform,nanolearning/edx-platform,kursitet/edx-platform,rhndg/openedx,torchingloom/edx-platform,EduPepperPD/pepper2013,nanolearningllc/edx-platform-cypress,iivic/BoiseStateX,Shrhawk/edx-platform,ampax/edx-platform,hmcmooc/muddx-platform,jbzdak/edx-platform,itsjeyd/edx-platform,kmoocdev2/edx-platform,jzoldak/edx-platform,bitifirefly/edx-platform,bitifirefly/edx-platform,valtech-mooc/edx-platform,dkarakats/edx-platform,PepperPD/edx-pepper-platform,UXE/local-edx,bigdatauniversity/edx-platform,arifsetiawan/edx-platform,LearnEra/LearnEraPlaftform,wwj718/ANALYSE,jamesblunt/edx-platform,jbzdak/edx-platform,morenopc/edx-platform,jonathan-beard/edx-platform,B-MOOC/edx-platform,hkawasaki/kawasaki-aio8-0,chauhanhardik/populo,nanolearningllc/edx-platform-cypress-2,shubhdev/edxOnBaadal,arbrandes
/edx-platform,4eek/edx-platform,shashank971/edx-platform,rue89-tech/edx-platform,shubhdev/edx-platform,hkawasaki/kawasaki-aio8-1,halvertoluke/edx-platform,msegado/edx-platform,mcgachey/edx-platform,teltek/edx-platform,antonve/s4-project-mooc,playm2mboy/edx-platform,antoviaque/edx-platform,rue89-tech/edx-platform,jruiperezv/ANALYSE,chauhanhardik/populo_2,dsajkl/123,shashank971/edx-platform,dkarakats/edx-platform,nanolearningllc/edx-platform-cypress-2,abdoosh00/edraak,Lektorium-LLC/edx-platform,waheedahmed/edx-platform,shashank971/edx-platform,etzhou/edx-platform,jazztpt/edx-platform,Unow/edx-platform,SivilTaram/edx-platform,stvstnfrd/edx-platform,nttks/edx-platform,franosincic/edx-platform,Edraak/edx-platform,jamiefolsom/edx-platform,mtlchun/edx,mbareta/edx-platform-ft,peterm-itr/edx-platform,appsembler/edx-platform,jbzdak/edx-platform,nttks/edx-platform,UXE/local-edx,nanolearningllc/edx-platform-cypress-2,Edraak/edraak-platform,cecep-edu/edx-platform,Softmotions/edx-platform,louyihua/edx-platform,shurihell/testasia,Livit/Livit.Learn.EdX,cognitiveclass/edx-platform,a-parhom/edx-platform,ZLLab-Mooc/edx-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,RPI-OPENEDX/edx-platform,cecep-edu/edx-platform,mitocw/edx-platform,nanolearningllc/edx-platform-cypress,ahmedaljazzar/edx-platform,ak2703/edx-platform,xinjiguaike/edx-platform,hkawasaki/kawasaki-aio8-0,leansoft/edx-platform,auferack08/edx-platform,devs1991/test_edx_docmode,kmoocdev/edx-platform,vasyarv/edx-platform,shubhdev/openedx,AkA84/edx-platform,martynovp/edx-platform,SivilTaram/edx-platform,nanolearning/edx-platform,auferack08/edx-platform,mushtaqak/edx-platform,kalebhartje/schoolboost,pelikanchik/edx-platform,Edraak/edraak-platform,arbrandes/edx-platform,shabab12/edx-platform,proversity-org/edx-platform,zubair-arbi/edx-platform,kxliugang/edx-platform,sudheerchintala/LearnEraPlatForm,dsajkl/reqiop,eduNEXT/edunext-platform,fly19890211/edx-platform,jbzdak/edx-platform,jzoldak/edx-platform,itsjeyd/edx-platfor
m,msegado/edx-platform,mjg2203/edx-platform-seas,LearnEra/LearnEraPlaftform,polimediaupv/edx-platform,cyanna/edx-platform,kmoocdev2/edx-platform,marcore/edx-platform,morenopc/edx-platform,DefyVentures/edx-platform,abdoosh00/edx-rtl-final,Lektorium-LLC/edx-platform,SivilTaram/edx-platform,cselis86/edx-platform,EduPepperPD/pepper2013,solashirai/edx-platform,inares/edx-platform,dcosentino/edx-platform,edx/edx-platform,procangroup/edx-platform,Stanford-Online/edx-platform,kmoocdev2/edx-platform,arbrandes/edx-platform,simbs/edx-platform,4eek/edx-platform,peterm-itr/edx-platform,utecuy/edx-platform,bigdatauniversity/edx-platform,olexiim/edx-platform,mjg2203/edx-platform-seas,EduPepperPD/pepper2013,Unow/edx-platform,Livit/Livit.Learn.EdX,y12uc231/edx-platform,jswope00/GAI,inares/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,hkawasaki/kawasaki-aio8-2,dsajkl/reqiop,TsinghuaX/edx-platform,mcgachey/edx-platform,OmarIthawi/edx-platform,doganov/edx-platform,rismalrv/edx-platform,IITBinterns13/edx-platform-dev,MakeHer/edx-platform,mahendra-r/edx-platform,Semi-global/edx-platform,dsajkl/reqiop,ferabra/edx-platform,ampax/edx-platform-backup,bdero/edx-platform,jazztpt/edx-platform,xingyepei/edx-platform,mjg2203/edx-platform-seas,polimediaupv/edx-platform,adoosii/edx-platform,shurihell/testasia,wwj718/ANALYSE,Stanford-Online/edx-platform,pabloborrego93/edx-platform,JioEducation/edx-platform,etzhou/edx-platform,devs1991/test_edx_docmode,cyanna/edx-platform,pomegranited/edx-platform,pepeportela/edx-platform,WatanabeYasumasa/edx-platform,yokose-ks/edx-platform,naresh21/synergetics-edx-platform,hmcmooc/muddx-platform,halvertoluke/edx-platform,4eek/edx-platform,ovnicraft/edx-platform,yokose-ks/edx-platform,hastexo/edx-platform,beacloudgenius/edx-platform,mtlchun/edx,xingyepei/edx-platform,dcosentino/edx-platform,jazztpt/edx-platform,ak2703/edx-platform,PepperPD/edx-pepper-platform,10clouds/edx-platform,eemirtekin/edx-platform,J861449197/edx-platform,torchingloom/edx-platform,hka
wasaki/kawasaki-aio8-2,doismellburning/edx-platform,SravanthiSinha/edx-platform,Stanford-Online/edx-platform,simbs/edx-platform,hkawasaki/kawasaki-aio8-0,devs1991/test_edx_docmode,cecep-edu/edx-platform,Edraak/edx-platform,OmarIthawi/edx-platform,zadgroup/edx-platform,etzhou/edx-platform,ahmadiga/min_edx,rismalrv/edx-platform,xinjiguaike/edx-platform,beni55/edx-platform,defance/edx-platform,zhenzhai/edx-platform,kamalx/edx-platform,kursitet/edx-platform,wwj718/ANALYSE,cpennington/edx-platform,ampax/edx-platform,Edraak/edraak-platform,motion2015/edx-platform,don-github/edx-platform,Shrhawk/edx-platform,fintech-circle/edx-platform,Edraak/circleci-edx-platform,jjmiranda/edx-platform,JioEducation/edx-platform,waheedahmed/edx-platform,devs1991/test_edx_docmode,openfun/edx-platform,tiagochiavericosta/edx-platform,yokose-ks/edx-platform,dsajkl/123,J861449197/edx-platform,tanmaykm/edx-platform,morenopc/edx-platform,JCBarahona/edX,zadgroup/edx-platform,torchingloom/edx-platform,Ayub-Khan/edx-platform,vikas1885/test1,hmcmooc/muddx-platform,alu042/edx-platform,xuxiao19910803/edx-platform,chrisndodge/edx-platform,nanolearningllc/edx-platform-cypress-2,hastexo/edx-platform,openfun/edx-platform,marcore/edx-platform,torchingloom/edx-platform,tanmaykm/edx-platform,Ayub-Khan/edx-platform,Softmotions/edx-platform,hamzehd/edx-platform,benpatterson/edx-platform,motion2015/a3,IONISx/edx-platform,LICEF/edx-platform,appliedx/edx-platform,mbareta/edx-platform-ft,Stanford-Online/edx-platform,jzoldak/edx-platform,Livit/Livit.Learn.EdX,UXE/local-edx,cyanna/edx-platform,hkawasaki/kawasaki-aio8-1,rationalAgent/edx-platform-custom,kalebhartje/schoolboost,wwj718/edx-platform,nikolas/edx-platform,RPI-OPENEDX/edx-platform,kamalx/edx-platform,IONISx/edx-platform,jruiperezv/ANALYSE,motion2015/edx-platform,eemirtekin/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,EDUlib/edx-platform,solashirai/edx-platform,nttks/jenkins-test,zubair-arbi/edx-platform,nanolearning/edx-platfor
m,beni55/edx-platform,xuxiao19910803/edx,Endika/edx-platform,zofuthan/edx-platform,polimediaupv/edx-platform,gymnasium/edx-platform,louyihua/edx-platform,kmoocdev/edx-platform,antonve/s4-project-mooc,ferabra/edx-platform,angelapper/edx-platform,J861449197/edx-platform,pomegranited/edx-platform,vismartltd/edx-platform,kalebhartje/schoolboost,sameetb-cuelogic/edx-platform-test,jzoldak/edx-platform,knehez/edx-platform,kmoocdev2/edx-platform,fintech-circle/edx-platform,praveen-pal/edx-platform,zerobatu/edx-platform,jruiperezv/ANALYSE,jamesblunt/edx-platform,doganov/edx-platform,zofuthan/edx-platform,martynovp/edx-platform,raccoongang/edx-platform,shurihell/testasia,appsembler/edx-platform,DefyVentures/edx-platform,chand3040/cloud_that,morpheby/levelup-by,teltek/edx-platform,ampax/edx-platform-backup,eduNEXT/edunext-platform,JCBarahona/edX,solashirai/edx-platform,kamalx/edx-platform,antonve/s4-project-mooc,nanolearningllc/edx-platform-cypress,Lektorium-LLC/edx-platform,SravanthiSinha/edx-platform,jazkarta/edx-platform-for-isc,waheedahmed/edx-platform,tiagochiavericosta/edx-platform,longmen21/edx-platform,xuxiao19910803/edx,solashirai/edx-platform,dsajkl/123,nttks/edx-platform,pepeportela/edx-platform,AkA84/edx-platform,chrisndodge/edx-platform,halvertoluke/edx-platform,Semi-global/edx-platform,eduNEXT/edunext-platform,mahendra-r/edx-platform,jamesblunt/edx-platform,UOMx/edx-platform,gsehub/edx-platform,JioEducation/edx-platform,jolyonb/edx-platform,jazkarta/edx-platform,zofuthan/edx-platform,chauhanhardik/populo,benpatterson/edx-platform,jswope00/griffinx,kamalx/edx-platform,alexthered/kienhoc-platform,ahmedaljazzar/edx-platform,shubhdev/edx-platform,sudheerchintala/LearnEraPlatForm,chrisndodge/edx-platform,deepsrijit1105/edx-platform,B-MOOC/edx-platform,pabloborrego93/edx-platform,DefyVentures/edx-platform,wwj718/ANALYSE,ubc/edx-platform,valtech-mooc/edx-platform,jazkarta/edx-platform,shubhdev/edxOnBaadal,TeachAtTUM/edx-platform,EDUlib/edx-platform,dsajkl/reqiop,benpatt
erson/edx-platform,adoosii/edx-platform,hkawasaki/kawasaki-aio8-2,fintech-circle/edx-platform,gymnasium/edx-platform,xingyepei/edx-platform,10clouds/edx-platform,ampax/edx-platform-backup,MSOpenTech/edx-platform,vismartltd/edx-platform,Edraak/edx-platform,fintech-circle/edx-platform,jonathan-beard/edx-platform,mjg2203/edx-platform-seas,jbassen/edx-platform,atsolakid/edx-platform,jonathan-beard/edx-platform,jamiefolsom/edx-platform,pdehaye/theming-edx-platform,nagyistoce/edx-platform,PepperPD/edx-pepper-platform,edx-solutions/edx-platform,ubc/edx-platform,shubhdev/edx-platform,LICEF/edx-platform,vismartltd/edx-platform,MakeHer/edx-platform,naresh21/synergetics-edx-platform,pku9104038/edx-platform,IndonesiaX/edx-platform,fly19890211/edx-platform,fly19890211/edx-platform,ESOedX/edx-platform,motion2015/edx-platform,tiagochiavericosta/edx-platform,motion2015/a3,doismellburning/edx-platform,DNFcode/edx-platform,EduPepperPDTesting/pepper2013-testing,Ayub-Khan/edx-platform,MakeHer/edx-platform,lduarte1991/edx-platform,B-MOOC/edx-platform,mcgachey/edx-platform,cpennington/edx-platform,motion2015/edx-platform,appliedx/edx-platform,andyzsf/edx,arifsetiawan/edx-platform,valtech-mooc/edx-platform,antoviaque/edx-platform,playm2mboy/edx-platform,apigee/edx-platform,zerobatu/edx-platform,jswope00/griffinx,y12uc231/edx-platform,yokose-ks/edx-platform,appliedx/edx-platform,beacloudgenius/edx-platform,Edraak/edx-platform,cselis86/edx-platform,amir-qayyum-khan/edx-platform,cognitiveclass/edx-platform,ubc/edx-platform,LearnEra/LearnEraPlaftform,playm2mboy/edx-platform,jazztpt/edx-platform,y12uc231/edx-platform,teltek/edx-platform,RPI-OPENEDX/edx-platform,abdoosh00/edx-rtl-final,nttks/jenkins-test,ahmadiga/min_edx,kursitet/edx-platform,zadgroup/edx-platform,zerobatu/edx-platform,4eek/edx-platform,playm2mboy/edx-platform,leansoft/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,ferabra/edx-platform,PepperPD/edx-pepper-platform,morenopc/edx-platform,doganov/edx-platform,ahm
adio/edx-platform,caesar2164/edx-platform,EduPepperPD/pepper2013,eestay/edx-platform,pomegranited/edx-platform,hastexo/edx-platform,IITBinterns13/edx-platform-dev,cecep-edu/edx-platform,appsembler/edx-platform,SivilTaram/edx-platform,motion2015/a3,Ayub-Khan/edx-platform,appliedx/edx-platform,DNFcode/edx-platform,nikolas/edx-platform,pku9104038/edx-platform,rationalAgent/edx-platform-custom,zhenzhai/edx-platform,knehez/edx-platform,franosincic/edx-platform,longmen21/edx-platform,ESOedX/edx-platform,EduPepperPDTesting/pepper2013-testing,pomegranited/edx-platform,beacloudgenius/edx-platform,mbareta/edx-platform-ft,benpatterson/edx-platform,rhndg/openedx,nanolearning/edx-platform,eestay/edx-platform,shubhdev/openedx,UOMx/edx-platform,polimediaupv/edx-platform,ak2703/edx-platform,antoviaque/edx-platform,RPI-OPENEDX/edx-platform,abdoosh00/edx-rtl-final,cognitiveclass/edx-platform,chand3040/cloud_that,valtech-mooc/edx-platform,bitifirefly/edx-platform,kursitet/edx-platform,alu042/edx-platform,IITBinterns13/edx-platform-dev,hamzehd/edx-platform,longmen21/edx-platform,shabab12/edx-platform,DNFcode/edx-platform,ahmadiga/min_edx,WatanabeYasumasa/edx-platform,philanthropy-u/edx-platform,carsongee/edx-platform,jswope00/GAI,edry/edx-platform,martynovp/edx-platform,dcosentino/edx-platform,IndonesiaX/edx-platform,eemirtekin/edx-platform,mahendra-r/edx-platform,angelapper/edx-platform,stvstnfrd/edx-platform,edry/edx-platform,MSOpenTech/edx-platform,jruiperezv/ANALYSE,xuxiao19910803/edx-platform,mtlchun/edx,ahmadio/edx-platform,shubhdev/edx-platform,nagyistoce/edx-platform,xingyepei/edx-platform,mjirayu/sit_academy,shubhdev/openedx,waheedahmed/edx-platform,angelapper/edx-platform,shurihell/testasia,caesar2164/edx-platform,synergeticsedx/deployment-wipro,jbassen/edx-platform,iivic/BoiseStateX,vikas1885/test1,kalebhartje/schoolboost,Shrhawk/edx-platform,pku9104038/edx-platform,andyzsf/edx,romain-li/edx-platform,kxliugang/edx-platform,jolyonb/edx-platform,Kalyzee/edx-platform,Shrhawk/ed
x-platform,OmarIthawi/edx-platform,pabloborrego93/edx-platform,openfun/edx-platform,atsolakid/edx-platform,auferack08/edx-platform,jazkarta/edx-platform-for-isc,leansoft/edx-platform,ubc/edx-platform,jonathan-beard/edx-platform,nanolearningllc/edx-platform-cypress,chudaol/edx-platform,jbassen/edx-platform,alexthered/kienhoc-platform,shashank971/edx-platform,jbassen/edx-platform,chrisndodge/edx-platform,LearnEra/LearnEraPlaftform,pepeportela/edx-platform,arifsetiawan/edx-platform,bigdatauniversity/edx-platform,devs1991/test_edx_docmode,vismartltd/edx-platform,hmcmooc/muddx-platform,bdero/edx-platform,a-parhom/edx-platform,andyzsf/edx,B-MOOC/edx-platform,marcore/edx-platform,martynovp/edx-platform,utecuy/edx-platform,louyihua/edx-platform,SravanthiSinha/edx-platform,edx/edx-platform,Edraak/circleci-edx-platform,Semi-global/edx-platform,wwj718/ANALYSE,rue89-tech/edx-platform,romain-li/edx-platform,rismalrv/edx-platform,jjmiranda/edx-platform,kxliugang/edx-platform,jazkarta/edx-platform-for-isc,utecuy/edx-platform,zerobatu/edx-platform,ZLLab-Mooc/edx-platform,Kalyzee/edx-platform,EDUlib/edx-platform,chudaol/edx-platform,miptliot/edx-platform,kmoocdev/edx-platform | lms/djangoapps/courseware/features/video.py | lms/djangoapps/courseware/features/video.py | #pylint: disable=C0111
from lettuce import world, step
from common import *
############### ACTIONS ####################
@step('when I view it it does autoplay')
def does_autoplay(step):
assert(world.css_find('.video')[0]['data-autoplay'] == 'True')
@step('the course has a Video component')
def view_video(step):
coursename = TEST_COURSE_NAME.replace(' ', '_')
i_am_registered_for_the_course(step, coursename)
# Make sure we have a video
video = add_video_to_course(coursename)
chapter_name = TEST_SECTION_NAME.replace(" ", "_")
section_name = chapter_name
url = django_url('/courses/edx/Test_Course/Test_Course/courseware/%s/%s' %
(chapter_name, section_name))
world.browser.visit(url)
def add_video_to_course(course):
template_name = 'i4x://edx/templates/video/default'
world.ItemFactory.create(parent_location=section_location(course),
template=template_name,
display_name='Video')
| #pylint: disable=C0111
from lettuce import world, step
from common import *
############### ACTIONS ####################
@step('when I view it it does autoplay')
def does_autoplay(step):
assert(world.css_find('.video')[0]['data-autoplay'] == 'True')
@step('the course has a Video component')
def view_video(step):
coursename = TEST_COURSE_NAME.replace(' ', '_')
i_am_registered_for_the_course(step, coursename)
# Make sure we have a video
video = add_video_to_course(coursename)
chapter_name = TEST_SECTION_NAME.replace(" ", "_")
section_name = chapter_name
url = django_url('/courses/edx/Test_Course/Test_Course/courseware/%s/%s' %
(chapter_name, section_name))
world.browser.visit(url)
def add_video_to_course(course):
template_name = 'i4x://edx/templates/video/default'
world.ItemFactory.create(parent_location=section_location(course),
template=template_name,
display_name='Video')
| agpl-3.0 | Python |
54b0feebb18816a936f4a7f323a77808f9973eb2 | Update testes.py | kauaramirez/devops-aula05 | Src/testes.py | Src/testes.py | import jogovelha
import sys
erroInicializar = False
jogo = jogovelha.inicializar()
if len(jogo) != 3:
erroInicializar = True
else:
for linha in jogo:
if len(linha) != 3:
erroInicializar = True
else:
for elemento in linha:
if elemento != "X":
erroInicializar = True
if erroInicializar:
sys.exit(1)
else:
sys.exit(0)
| import jogovelha
import sys
erroInicializar = False
jogo = jogovelha.inicializar()
if len(jogo) != 3:
erroInicializar = True
else:
for linha in jogo:
if len(linha) != 3:
erroInicializar = True
else:
for elemento in linha:
if elemento != ".":
erroInicializar = True
if erroInicializar:
sys.exit(1)
else:
sys.exit(0) | apache-2.0 | Python |
73ceff96b2f065517a7d67cb0b25361f5bd61388 | Delete fixture after running tests | cpsaltis/pythogram-core | src/gramcore/filters/tests/test_edges.py | src/gramcore/filters/tests/test_edges.py | """Tests for module gramcore.filters.edges"""
import os
import numpy
from PIL import Image, ImageDraw
from nose.tools import assert_equal
from skimage import io
from gramcore.filters import edges
def setup():
"""Create image fixture
The background color is set by default to black (value == 0).
.. note::
Although the rectangle should be 10x10 in reality it returns an 11x11.
If the image is read with io.imread, then the colored pixels and their
neighbours can be accessed with arr[9:22, 4:17].
"""
img = Image.new('L', (20, 40))
draw = ImageDraw.Draw(img)
draw.rectangle([(5, 10), (15, 20)], fill=255)
img.save('white-square.tif')
del draw
def teardown():
"""Delete fixture"""
os.remove('white-square.tif')
def test_canny():
"""Apply canny to grey image and check return values
.. warning::
This seems to produce some artifacts. The fixture is a black
image with a white 11x11 rectangle. Thus you expect you get 44 (4*11)
pixels of edges. Instead it gets 50, when sigma is 1 and 40 when sigma
is 2. In both cases the shape is not correct.
"""
img = io.imread('white-square.tif')
parameters = {'data': [img], 'sigma': 1.0}
result = edges.canny(parameters)
# this should be 44 check the resulting image with
#result *= 255
#io.imsave('result.tif', result)
assert_equal(result.sum(), 50)
def test_prewitt():
"""Apply prewitt to grey image and check return values
.. note::
This produces correct shape though it shrinks it by 2 pixels, there
are no edge pixels on the corners and each edge has a width of 2
pixels. Based on the original rectangle size, which is 11x11, and the
above issues it returns 4*9*2 = 72 edge pixels.
"""
img = io.imread('white-square.tif')
parameters = {'data': [img]}
result = edges.prewitt(parameters)
result = result.astype('uint8')
assert_equal(result.sum(), 72)
def test_sobel():
"""Apply sobel to grey image and check return values
.. note::
This produces correct shape though it shrinks it by 2 pixels and each
edge has a width of 2 pixels. Based on the original rectangle size,
which is 11x11, and the above issues it returns 4*9*2 + 4 = 76 edge
pixels.
"""
img = io.imread('white-square.tif')
parameters = {'data': [img]}
result = edges.sobel(parameters)
result = result.astype('uint8')
assert_equal(result.sum(), 76)
| """Tests for module gramcore.filters.edges"""
import os
import numpy
from PIL import Image, ImageDraw
from nose.tools import assert_equal
from skimage import io
from gramcore.filters import edges
def setup():
"""Create image fixture
The background color is set by default to black (value == 0).
.. note::
Although the rectangle should be 10x10 in reality it returns an 11x11.
If the image is read with io.imread, then the colored pixels and their
neighbours can be accessed with arr[9:22, 4:17].
"""
img = Image.new('L', (20, 40))
draw = ImageDraw.Draw(img)
draw.rectangle([(5, 10), (15, 20)], fill=255)
img.save('white-square.tif')
del draw
def teardown():
"""Delete fixture"""
#os.remove('white-square.tif')
def test_canny():
"""Apply canny to grey image and check return values
.. warning::
This seems to produce some artifacts. The fixture is a black
image with a white 11x11 rectangle. Thus you expect you get 44 (4*11)
pixels of edges. Instead it gets 50, when sigma is 1 and 40 when sigma
is 2. In both cases the shape is not correct.
"""
img = io.imread('white-square.tif')
parameters = {'data': [img], 'sigma': 1.0}
result = edges.canny(parameters)
# this should be 44 check the resulting image with
#result *= 255
#io.imsave('result.tif', result)
assert_equal(result.sum(), 50)
def test_prewitt():
"""Apply prewitt to grey image and check return values
.. note::
This produces correct shape though it shrinks it by 2 pixels, there
are no edge pixels on the corners and each edge has a width of 2
pixels. Based on the original rectangle size, which is 11x11, and the
above issues it returns 4*9*2 = 72 edge pixels.
"""
img = io.imread('white-square.tif')
parameters = {'data': [img]}
result = edges.prewitt(parameters)
result = result.astype('uint8')
assert_equal(result.sum(), 72)
def test_sobel():
    """Apply sobel to grey image and check return values

    .. note::

        The shape comes out correct but shrunk by 2 pixels, with each side
        2 pixels wide. For the original 11x11 rectangle that works out to
        4*9*2 + 4 = 76 edge pixels.
    """
    image = io.imread('white-square.tif')
    edge_map = edges.sobel({'data': [image]})
    edge_map = edge_map.astype('uint8')
    assert_equal(edge_map.sum(), 76)
| mit | Python |
bd8caf6ab48bb1fbefdced7f33edabbdf017894a | Change of names | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Demo/sockets/echosvr.py | Demo/sockets/echosvr.py | #! /usr/local/python
# Python implementation of an 'echo' tcp server: echo all data it receives.
#
# This is the simplest possible server, sevicing a single request only.
import sys
from socket import *
# The standard echo port isn't very useful, it requires root permissions!
# ECHO_PORT = 7
ECHO_PORT = 50000 + 7
BUFSIZE = 1024
def main():
if len(sys.argv) > 1:
port = int(eval(sys.argv[1]))
else:
port = ECHO_PORT
s = socket(AF_INET, SOCK_STREAM)
s.bind('', port)
s.listen(0)
conn, (remotehost, remoteport) = s.accept()
print 'connected by', remotehost, remoteport
while 1:
data = conn.recv(BUFSIZE)
if not data:
break
conn.send(data)
main()
| #! /usr/local/python
# Python implementation of an 'echo' tcp server: echo all data it receives.
#
# This is the simplest possible server, sevicing a single request only.
import sys
from socket import *
# The standard echo port isn't very useful, it requires root permissions!
# ECHO_PORT = 7
ECHO_PORT = 50000 + 7
BUFSIZE = 1024
def main():
if len(sys.argv) > 1:
port = int(eval(sys.argv[1]))
else:
port = ECHO_PORT
s = socket(AF_INET, SOCK_STREAM)
s.bind('', port)
s.listen(0)
conn, (host, remoteport) = s.accept()
print 'connected by', host, remoteport
while 1:
data = conn.recv(BUFSIZE)
if not data:
break
conn.send(data)
main()
| mit | Python |
dac5de12f9e775d50bb1e441016ae9625052e149 | expand ALLOWED_HOSTS | uktrade/navigator,uktrade/navigator,uktrade/navigator,uktrade/navigator | app/navigator/settings/prod.py | app/navigator/settings/prod.py | from .base import *
# Production overrides for the base Django settings.
DEBUG = False

# As the app is running behind a host-based router supplied by Heroku or other
# PaaS, we can open ALLOWED_HOSTS
ALLOWED_HOSTS = ['*']

# Sentry error reporting via Raven.
INSTALLED_APPS += [
    'raven.contrib.django.raven_compat'
]

# Force HTTPS everywhere; HSTS is pinned for one year (31536000 s) and
# cookies are only ever sent over TLS.
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True

# Activity Stream API
ACTIVITY_STREAM_ACCESS_KEY_ID = env.str('ACTIVITY_STREAM_ACCESS_KEY_ID')
ACTIVITY_STREAM_SECRET_ACCESS_KEY = env.str('ACTIVITY_STREAM_SECRET_ACCESS_KEY')
| from .base import *
DEBUG = False
ALLOWED_HOSTS = [
'selling-online-overseas.export.great.gov.uk',
'navigator.cloudapps.digital',
'navigator.london.cloudapps.digital',
'www.great.gov.uk']
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
# Activity Stream API
ACTIVITY_STREAM_ACCESS_KEY_ID = env.str('ACTIVITY_STREAM_ACCESS_KEY_ID')
ACTIVITY_STREAM_SECRET_ACCESS_KEY = env.str('ACTIVITY_STREAM_SECRET_ACCESS_KEY')
| mit | Python |
64804965e031f365937ef8fe70dc749c4532053d | fix abstract scraper, can't use lxml's url parsing because we need a custom user agent | texastribune/the-dp,texastribune/the-dp,texastribune/the-dp,texastribune/the-dp | tx_highered/scripts/initial_wikipedia.py | tx_highered/scripts/initial_wikipedia.py | #! /usr/bin/env python
try:
from django.utils.timezone import now
except ImportError:
from datetime.datetime import now
import requests
from lxml.html import document_fromstring, tostring
from tx_highered.models import Institution
def get_wiki_title(name):
    """Return the best-matching Wikipedia article title for *name*.

    Queries the MediaWiki ``opensearch`` API and returns the first
    suggestion, or ``None`` when there is no match.
    """
    endpoint = "http://en.wikipedia.org/w/api.php"
    params = dict(action="opensearch",
                  search=name,
                  limit=1,
                  namespace=0,
                  format="json",)
    r = requests.get(endpoint, params=params)
    try:
        # NOTE(review): ``r.json`` without parentheses is the old requests
        # API (a property); on modern requests this needs ``r.json()``.
        _, results = r.json
        title = results[0]
    except IndexError:
        # opensearch returned an empty suggestion list.
        return None
    return title
def get_wiki_abstract(url):
    """Scrape the lead section (everything before the TOC) of a wiki page.

    Returns the concatenated HTML of the lead ``<p>`` elements, or
    ``None`` when the page has no element with id 'toc'.
    """
    # Fetch with requests rather than lxml's own URL handling so a custom
    # User-Agent header can be sent.
    r = requests.get(url, headers={'User-Agent': 'thedp-scraper/0.1alpha'})
    doc = document_fromstring(r.text)
    root = doc
    try:
        toc = root.get_element_by_id('toc')
    except KeyError:
        # Without a table of contents we cannot delimit the abstract.
        return None
    abstract = []
    # Walk the TOC's siblings from the top of the page; everything before
    # the TOC itself is the lead section.
    for elem in toc.getparent().iterchildren():
        if elem == toc:
            break
        if elem.tag == 'p':
            # Rewrite relative links so the extracted HTML stands alone.
            elem.make_links_absolute(url)
            abstract.append(tostring(elem))
    return "\n".join(abstract).strip()
def main():
    """Backfill Wikipedia data for university institutions.

    First pass: look up and store a Wikipedia title for institutions that
    lack one.  Second pass: scrape and store the article abstract for
    institutions that have a title but were never scraped.
    """
    queryset = Institution.objects.filter(institution_type='uni')
    qs = queryset.filter(wikipedia_title__isnull=True)
    for inst in qs:
        title = get_wiki_title(inst.name)
        if title:
            inst.wikipedia_title = title
            inst.save()
            print inst.name + " -> " + title
    qs = queryset.filter(wikipedia_title__isnull=False, wikipedia_scraped=None)
    for inst in qs:
        text = get_wiki_abstract(inst.wikipedia_url)
        if text:
            inst.wikipedia_abstract = text
            # Stamp the scrape time so this institution is skipped next run.
            inst.wikipedia_scraped = now()
            inst.save()
            print inst
if __name__ == "__main__":
main()
| #! /usr/bin/env python
import datetime
import requests
from lxml.html import parse, tostring
from tx_highered.models import Institution
def get_wiki_title(name):
endpoint = "http://en.wikipedia.org/w/api.php"
params = dict(action="opensearch",
search=name,
limit=1,
namespace=0,
format="json",)
r = requests.get(endpoint, params=params)
try:
_, results = r.json
title = results[0]
except IndexError:
return None
return title
def get_wiki_abstract(url):
doc = parse(url) # won't handle https
root = doc.getroot()
toc = root.get_element_by_id('toc')
abstract = []
for elem in toc.getparent().iterchildren():
if elem == toc:
break
if elem.tag == 'p':
elem.make_links_absolute()
abstract.append(tostring(elem))
return "\n".join(abstract).strip()
def main():
queryset = Institution.objects.filter(institution_type='uni')
qs = queryset.filter(wikipedia_title__isnull=True)
for inst in qs:
title = get_wiki_title(inst.name)
if title:
inst.wikipedia_title = title
inst.save()
print inst.name + " -> " + title
qs = queryset.filter(wikipedia_title__isnull=False, wikipedia_scraped=None)
for inst in qs:
text = get_wiki_abstract(inst.wikipedia_url)
if text:
inst.wikipedia_abstract = text
inst.wikipedia_scraped = datetime.datetime.now()
inst.save()
print inst
if __name__ == "__main__":
main()
| apache-2.0 | Python |
db3f71f537a85396d777ba28d3ad6c8156137c24 | Change pg key | juruen/cavalieri,juruen/cavalieri,juruen/cavalieri,juruen/cavalieri | src/python/pagerduty.py | src/python/pagerduty.py | import json
import urllib2
PD_URL = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
TIMEOUT = 10
def request(action, json_str):
    """Send one event to the PagerDuty generic events API.

    *action* is 'trigger', 'acknowledge' or 'resolve'.  *json_str* is a
    JSON object that must contain ``pg_key`` (the PagerDuty service key)
    plus ``host`` and ``service``; ``state`` and ``metric`` are optional
    and feed the incident description.

    Returns True when PagerDuty reports success, False on any error.
    """
    obj = json.loads(json_str)
    description = "%s %s is %s ( %s )" % (
        obj.get('host', 'unknown host'),
        obj.get('service', 'unknown service'),
        obj.get('state', 'unknown state'),
        obj.get('metric', 'nil'))
    # Pop the service key so it is not echoed back inside 'details'.
    pg_key = obj.pop('pg_key')
    event = {
        'service_key': pg_key,
        'event_type': action,
        # The same host+service pair always maps to the same incident.
        'incident_key': "%s %s" % (obj['host'], obj['service']),
        'description': description,
        'details': json.dumps(obj)
    }
    try:
        result = json.loads(
            urllib2.urlopen(PD_URL, json.dumps(event), TIMEOUT).read())
        print result
    except Exception, e:
        # Network or API failure: report and signal failure to the caller.
        print str(e)
        return False
    return result['status'] == 'success'
def trigger(json_str):
    """Open (trigger) a PagerDuty incident for this event."""
    return request('trigger', json_str)

def acknowledge(json_str):
    """Mark the matching open incident as acknowledged."""
    return request('acknowledge', json_str)

def resolve(json_str):
    """Mark the matching open incident as resolved."""
    return request('resolve', json_str)
args = {
'pg_key': 'fixme',
'description': 'this is a test',
'host': 'foobar.com',
'service': 'whatever'
}
#trigger(json.dumps(args))
#resolve(json.dumps(args))
| import json
import urllib2
PD_URL = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
TIMEOUT = 10
def request(action, json_str):
obj = json.loads(json_str)
description = "%s %s is %s ( %s )" % (
obj.get('host', 'unknown host'),
obj.get('service', 'unknown service'),
obj.get('state', 'unknown state'),
obj.get('metric', 'nil'))
pg_key = obj.pop('pg_key')
event = {
'service_key': pg_key,
'event_type': action,
'incident_key': "%s %s" % (obj['host'], obj['service']),
'description': description,
'details': json.dumps(obj)
}
try:
result = json.loads(
urllib2.urlopen(PD_URL, json.dumps(event), TIMEOUT).read())
print result
except Exception, e:
print str(e)
return False
return result['status'] == 'success'
def trigger(json_str):
return request('trigger', json_str)
def acknowledge(json_str):
return request('acknowledge', json_str)
def resolve(json_str):
return request('resolve', json_str)
# Example payload for manual testing.  The service key is a placeholder:
# never commit a real PagerDuty service key to source control.
args = {
    'pg_key': 'fixme',
    'description': 'this is a test',
    'host': 'foobar.com',
    'service': 'whatever'
}
#trigger(json.dumps(args))
#resolve(json.dumps(args))
| mit | Python |
3999e9812a766066dcccf6a4d07174144cb9f72d | Add Minecraft Wiki link to version item | wurstmineberg/bitbar-server-status | wurstmineberg.45s.py | wurstmineberg.45s.py | #!/usr/local/bin/python3
import requests
# Fetch the member directory and the current world status from the
# public Wurstmineberg API.
people = requests.get('https://api.wurstmineberg.de/v2/people.json').json()
status = requests.get('https://api.wurstmineberg.de/v2/world/wurstmineberg/status.json').json()

# BitBar protocol: the first line is the menu-bar text (player count);
# '---' separates it from the dropdown items.
print(len(status['list']))
print('---')
print('Version: {ver}|href=http://minecraft.gamepedia.com/{ver} color=gray'.format(ver=status['version']))
for wmb_id in status['list']:
    # Fall back to the raw member ID when no display name is on record.
    display_name = people['people'].get(wmb_id, {}).get('name', wmb_id)
    if people['people'].get(wmb_id, False) and people['people'][wmb_id].get('slack', False):
        slack_name = people['people'][wmb_id]['slack']['username']
        slack_url = 'https://wurstmineberg.slack.com/messages/@' + slack_name
    else:
        slack_url = None
    print('{}|href=https://wurstmineberg.de/people/{} color=#2889be'.format(display_name, wmb_id))
    if slack_url is not None:
        # Alternate row (shown while holding option) linking to Slack.
        print('@{}|alternate=true href={} color=red'.format(slack_name, slack_url))
print('---')
print('Start Minecraft | bash=/usr/bin/open param1=-a param2=Minecraft terminal=false')
| #!/usr/local/bin/python3
import requests
people = requests.get('https://api.wurstmineberg.de/v2/people.json').json()
status = requests.get('https://api.wurstmineberg.de/v2/world/wurstmineberg/status.json').json()
print(len(status['list']))
print('---')
print('Version: {}|color=gray'.format(status['version']))
for wmb_id in status['list']:
display_name = people['people'].get(wmb_id, {}).get('name', wmb_id)
if people['people'].get(wmb_id, False) and people['people'][wmb_id].get('slack', False):
slack_name = people['people'][wmb_id]['slack']['username']
slack_url = 'https://wurstmineberg.slack.com/messages/@' + slack_name
else:
slack_url = None
print('{}|href=https://wurstmineberg.de/people/{} color=#2889be'.format(display_name, wmb_id))
if slack_url is not None:
print('@{}|alternate=true href={} color=red'.format(slack_name, slack_url))
print('---')
print('Start Minecraft | bash=/usr/bin/open param1=-a param2=Minecraft terminal=false')
| mit | Python |
d792201bc311a15e5df48259008331b771c59aca | Fix CSS problem when Flexx is enbedded in page-app | jrversteegh/flexx,JohnLunzer/flexx,zoofIO/flexx,JohnLunzer/flexx,jrversteegh/flexx,JohnLunzer/flexx,zoofIO/flexx | flexx/ui/layouts/_layout.py | flexx/ui/layouts/_layout.py | """ Layout widgets
"""
from . import Widget
class Layout(Widget):
    """ Abstract class for widgets that organize their child widgets.

    Panel widgets are layouts that do not take the natural size of their
    content into account, making them more efficient and suited for
    high-level layout. Other layouts, like HBox, are more suited for
    laying out content where the natural size is important.
    """

    # Shared CSS: layouts fill their parent with no spacing of their own;
    # sizing of children is defined per concrete layout subclass.
    CSS = """
    body {
        margin: 0;
        padding: 0;
        /*overflow: hidden;*/
    }

    .flx-Layout {
        /* sizing of widgets/layouts inside layout is defined per layout */
        width: 100%;
        height: 100%;
        margin: 0px;
        padding: 0px;
        border-spacing: 0px;
        border: 0px;
    }
    """
| """ Layout widgets
"""
from . import Widget
class Layout(Widget):
""" Abstract class for widgets that organize their child widgets.
Panel widgets are layouts that do not take the natural size of their
content into account, making them more efficient and suited for
high-level layout. Other layouts, like HBox, are more suited for
laying out content where the natural size is important.
"""
CSS = """
body {
margin: 0;
padding: 0;
overflow: hidden;
}
.flx-Layout {
/* sizing of widgets/layouts inside layout is defined per layout */
width: 100%;
height: 100%;
margin: 0px;
padding: 0px;
border-spacing: 0px;
border: 0px;
}
"""
| bsd-2-clause | Python |
a9cfdf8fdb6853f175cdc31abc2dec91ec6dcf3a | fix import | SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree | InvenTree/part/tasks.py | InvenTree/part/tasks.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.utils.translation import gettext_lazy as _
import InvenTree.helpers
import InvenTree.tasks
import common.notifications
import part.models
logger = logging.getLogger("inventree")
def notify_low_stock(part: part.models.Part):
    """Send a 'low stock' notification for *part* to its subscribers."""
    name = _("Low stock notification")
    # NOTE(review): the f-string is interpolated *before* gettext sees it,
    # so this exact message cannot be found in translation catalogs.
    message = _(f'The available stock for {part.name} has fallen below the configured minimum level')
    context = {
        'part': part,
        'name': name,
        'message': message,
        # Absolute link back to the part detail page for the email body.
        'link': InvenTree.helpers.construct_absolute_url(part.get_absolute_url()),
        'template': {
            'html': 'email/low_stock_notification.html',
            'subject': "[InvenTree] " + name,
        },
    }

    common.notifications.trigger_notifaction(
        part,
        'part.notify_low_stock',
        target_fnc=part.get_subscribers,
        context=context,
    )
def notify_low_stock_if_required(part: part.models.Part):
    """
    Check if the stock quantity has fallen below the minimum threshold of part.

    If true, notify the users who have subscribed to the part
    """

    # Run "up" the tree, to allow notification for "parent" parts
    parts = part.get_ancestors(include_self=True, ascending=True)

    for p in parts:
        if p.is_part_low_on_stock():
            # Offload the actual notification so it runs outside this request.
            InvenTree.tasks.offload_task(
                notify_low_stock,
                p
            )
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.utils.translation import gettext_lazy as _
import InvenTree.helpers
import InvenTree.tasks
import common.notifications
import part.models
from part import tasks as part_tasks
logger = logging.getLogger("inventree")
def notify_low_stock(part: part.models.Part):
name = _("Low stock notification")
message = _(f'The available stock for {part.name} has fallen below the configured minimum level')
context = {
'part': part,
'name': name,
'message': message,
'link': InvenTree.helpers.construct_absolute_url(part.get_absolute_url()),
'template': {
'html': 'email/low_stock_notification.html',
'subject': "[InvenTree] " + name,
},
}
common.notifications.trigger_notifaction(
part,
'part.notify_low_stock',
target_fnc=part.get_subscribers,
context=context,
)
def notify_low_stock_if_required(part: part.models.Part):
"""
Check if the stock quantity has fallen below the minimum threshold of part.
If true, notify the users who have subscribed to the part
"""
# Run "up" the tree, to allow notification for "parent" parts
parts = part.get_ancestors(include_self=True, ascending=True)
for p in parts:
if p.is_part_low_on_stock():
InvenTree.tasks.offload_task(
part_tasks.notify_low_stock,
p
)
| mit | Python |
26a21a9f5da718852c193420a0132ad822139ec0 | Remove PHPBB crap | ronakkhunt/kuma,hoosteeno/kuma,surajssd/kuma,ollie314/kuma,chirilo/kuma,ronakkhunt/kuma,Elchi3/kuma,bluemini/kuma,safwanrahman/kuma,davehunt/kuma,scrollback/kuma,safwanrahman/kuma,groovecoder/kuma,hoosteeno/kuma,cindyyu/kuma,MenZil/kuma,anaran/kuma,robhudson/kuma,darkwing/kuma,mozilla/kuma,utkbansal/kuma,SphinxKnight/kuma,cindyyu/kuma,groovecoder/kuma,chirilo/kuma,scrollback/kuma,a2sheppy/kuma,MenZil/kuma,darkwing/kuma,tximikel/kuma,RanadeepPolavarapu/kuma,davidyezsetz/kuma,utkbansal/kuma,MenZil/kuma,RanadeepPolavarapu/kuma,a2sheppy/kuma,safwanrahman/kuma,tximikel/kuma,FrankBian/kuma,ollie314/kuma,varunkamra/kuma,darkwing/kuma,cindyyu/kuma,YOTOV-LIMITED/kuma,utkbansal/kuma,varunkamra/kuma,hoosteeno/kuma,jwhitlock/kuma,tximikel/kuma,carnell69/kuma,carnell69/kuma,darkwing/kuma,chirilo/kuma,yfdyh000/kuma,mastizada/kuma,yfdyh000/kuma,davehunt/kuma,ollie314/kuma,anaran/kuma,jgmize/kuma,MenZil/kuma,a2sheppy/kuma,bluemini/kuma,FrankBian/kuma,groovecoder/kuma,SphinxKnight/kuma,utkbansal/kuma,openjck/kuma,openjck/kuma,nhenezi/kuma,groovecoder/kuma,yfdyh000/kuma,RanadeepPolavarapu/kuma,surajssd/kuma,MenZil/kuma,bluemini/kuma,yfdyh000/kuma,mozilla/kuma,ronakkhunt/kuma,jezdez/kuma,chirilo/kuma,groovecoder/kuma,escattone/kuma,ronakkhunt/kuma,jezdez/kuma,robhudson/kuma,SphinxKnight/kuma,Elchi3/kuma,mastizada/kuma,cindyyu/kuma,tximikel/kuma,jezdez/kuma,carnell69/kuma,carnell69/kuma,hoosteeno/kuma,openjck/kuma,biswajitsahu/kuma,davehunt/kuma,whip112/Whip112,a2sheppy/kuma,biswajitsahu/kuma,nhenezi/kuma,jezdez/kuma,robhudson/kuma,jezdez/kuma,anaran/kuma,openjck/kuma,surajssd/kuma,Elchi3/kuma,whip112/Whip112,FrankBian/kuma,surajssd/kuma,FrankBian/kuma,whip112/Whip112,safwanrahman/kuma,davehunt/kuma,davidyezsetz/kuma,RanadeepPolavarapu/kuma,ollie314/kuma,robhudson/kuma,anaran/kuma,cindyyu/kuma,SphinxKnight/kuma,anaran/kuma,YOTOV-LIMITED/kuma,surajssd/kuma,jgmize/kuma,scrollback/kuma,biswajitsahu/kuma,whip112/Whip112,jwhitlo
ck/kuma,nhenezi/kuma,jwhitlock/kuma,biswajitsahu/kuma,jgmize/kuma,groovecoder/kuma,varunkamra/kuma,escattone/kuma,safwanrahman/kuma,varunkamra/kuma,darkwing/kuma,jwhitlock/kuma,carnell69/kuma,biswajitsahu/kuma,mozilla/kuma,YOTOV-LIMITED/kuma,davidyezsetz/kuma,YOTOV-LIMITED/kuma,robhudson/kuma,safwanrahman/kuma,scrollback/kuma,whip112/Whip112,davehunt/kuma,ronakkhunt/kuma,davidyezsetz/kuma,davehunt/kuma,YOTOV-LIMITED/kuma,yfdyh000/kuma,FrankBian/kuma,Elchi3/kuma,Elchi3/kuma,YOTOV-LIMITED/kuma,robhudson/kuma,RanadeepPolavarapu/kuma,mozilla/kuma,chirilo/kuma,darkwing/kuma,ronakkhunt/kuma,tximikel/kuma,bluemini/kuma,hoosteeno/kuma,jgmize/kuma,nhenezi/kuma,bluemini/kuma,utkbansal/kuma,hoosteeno/kuma,scrollback/kuma,chirilo/kuma,carnell69/kuma,ollie314/kuma,openjck/kuma,cindyyu/kuma,surajssd/kuma,tximikel/kuma,RanadeepPolavarapu/kuma,anaran/kuma,jwhitlock/kuma,ollie314/kuma,mastizada/kuma,openjck/kuma,whip112/Whip112,biswajitsahu/kuma,utkbansal/kuma,mozilla/kuma,SphinxKnight/kuma,jgmize/kuma,davidyezsetz/kuma,jgmize/kuma,SphinxKnight/kuma,mastizada/kuma,varunkamra/kuma,MenZil/kuma,jezdez/kuma,yfdyh000/kuma,a2sheppy/kuma,bluemini/kuma,escattone/kuma,varunkamra/kuma,nhenezi/kuma | apps/devmo/context_processors.py | apps/devmo/context_processors.py | from django.conf import settings
from django.utils import translation
def i18n(request):
    """Template context: available languages, active language and text direction."""
    return {'LANGUAGES': settings.LANGUAGES,
            # Prefer the URL-map spelling of the language code when one exists.
            'LANG': settings.LANGUAGE_URL_MAP.get(translation.get_language())
                    or translation.get_language(),
            'DIR': 'rtl' if translation.get_language_bidi() else 'ltr',
            }
def next_url(request):
    """Expose the current path as ``next_url``, except on auth pages."""
    path = request.path
    # Login/registration pages must not become post-auth redirect targets.
    if 'login' in path or 'register' in path:
        return {}
    return {'next_url': path}
from django.utils import translation
def i18n(request):
return {'LANGUAGES': settings.LANGUAGES,
'LANG': settings.LANGUAGE_URL_MAP.get(translation.get_language())
or translation.get_language(),
'DIR': 'rtl' if translation.get_language_bidi() else 'ltr',
}
def next_url(request):
if 'login' not in request.path and 'register' not in request.path:
return {'next_url': request.path }
return {}
def phpbb_logged_in(request):
    """Detect PHPBB login cookie."""
    # A missing <prefix>_u cookie defaults to '1', which is treated as
    # "not logged in"; any other value counts as an authenticated user.
    return {
        'PHPBB_LOGGED_IN': (request.COOKIES.get(
            '%s_u' % settings.PHPBB_COOKIE_PREFIX, '1') != '1'),
        'PHPBB_SID': request.COOKIES.get(
            '%s_sid' % settings.PHPBB_COOKIE_PREFIX),
    }
| mpl-2.0 | Python |
2bf43e3ba86cc248e752175ffb82f4eab1803119 | delete question module had bug previously | unicefuganda/uSurvey,unicefuganda/uSurvey,unicefuganda/uSurvey,unicefuganda/uSurvey,unicefuganda/uSurvey | survey/models/question_module.py | survey/models/question_module.py | from survey.models import BaseModel
from django.db import models
class QuestionModule(BaseModel):
    # Human-readable module name.
    name = models.CharField(max_length=255)
    # Optional free-text description of what the module covers.
    description = models.TextField(null=True, blank=True)

    def remove_related_questions(self):
        """Delete every question template attached to this module."""
        self.question_templates.all().delete()

    def __unicode__(self):
        return self.name
| from survey.models import BaseModel
from django.db import models
class QuestionModule(BaseModel):
    # Human-readable module name.
    name = models.CharField(max_length=255)
    # Optional free-text description of what the module covers.
    description = models.TextField(null=True, blank=True)

    def remove_related_questions(self):
        """Delete every question template attached to this module.

        The related manager itself has no ``delete()``; the queryset must
        be obtained with ``.all()`` first.
        """
        self.question_templates.all().delete()

    def __unicode__(self):
        return self.name
| bsd-3-clause | Python |
e21e2ff9b8258be5533261f7834438c80b0082cc | Use iter(...) instead of .iter() | jeffreyliu3230/osf.io,hmoco/osf.io,jmcarp/osf.io,kwierman/osf.io,felliott/osf.io,pattisdr/osf.io,arpitar/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,jinluyuan/osf.io,MerlinZhang/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,acshi/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,mluke93/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,icereval/osf.io,ckc6cz/osf.io,jnayak1/osf.io,TomHeatwole/osf.io,wearpants/osf.io,laurenrevere/osf.io,chrisseto/osf.io,kwierman/osf.io,petermalcolm/osf.io,felliott/osf.io,sbt9uc/osf.io,njantrania/osf.io,asanfilippo7/osf.io,cosenal/osf.io,jnayak1/osf.io,reinaH/osf.io,dplorimer/osf,aaxelb/osf.io,haoyuchen1992/osf.io,adlius/osf.io,mluo613/osf.io,brandonPurvis/osf.io,jinluyuan/osf.io,jeffreyliu3230/osf.io,amyshi188/osf.io,binoculars/osf.io,cwisecarver/osf.io,billyhunt/osf.io,MerlinZhang/osf.io,reinaH/osf.io,erinspace/osf.io,kwierman/osf.io,lyndsysimon/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,mattclark/osf.io,reinaH/osf.io,arpitar/osf.io,danielneis/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,mluo613/osf.io,Johnetordoff/osf.io,cldershem/osf.io,njantrania/osf.io,mluke93/osf.io,wearpants/osf.io,brandonPurvis/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,fabianvf/osf.io,caneruguz/osf.io,sbt9uc/osf.io,caseyrygt/osf.io,HarryRybacki/osf.io,mattclark/osf.io,alexschiller/osf.io,KAsante95/osf.io,saradbowman/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,hmoco/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,ckc6cz/osf.io,chrisseto/osf.io,adlius/osf.io,njantrania/osf.io,ZobairAlijan/osf.io,GageGaskins/osf.io,dplorimer/osf,jolene-esposito/osf.io,caseyrollins/osf.io,SSJohns/osf.io,ticklemepierce/osf.io,sloria/osf.io,billyhunt/osf.io,mfraezz/osf.io,zamattiac/osf.io,zachjanicki/osf.io,HarryRybacki/osf.io,mluke93/osf.io,acshi/osf.io,ZobairAlijan/osf.io,caneruguz/osf.io,caneruguz/osf.io,cs
lzchen/osf.io,brandonPurvis/osf.io,chennan47/osf.io,aaxelb/osf.io,alexschiller/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,mluo613/osf.io,mluo613/osf.io,sbt9uc/osf.io,adlius/osf.io,abought/osf.io,amyshi188/osf.io,jinluyuan/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,billyhunt/osf.io,cwisecarver/osf.io,caneruguz/osf.io,samchrisinger/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,hmoco/osf.io,KAsante95/osf.io,adlius/osf.io,asanfilippo7/osf.io,emetsger/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,cosenal/osf.io,acshi/osf.io,ckc6cz/osf.io,doublebits/osf.io,erinspace/osf.io,abought/osf.io,alexschiller/osf.io,jnayak1/osf.io,bdyetton/prettychart,mluke93/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,doublebits/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,bdyetton/prettychart,cldershem/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,fabianvf/osf.io,dplorimer/osf,ticklemepierce/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,jnayak1/osf.io,kch8qx/osf.io,chrisseto/osf.io,mattclark/osf.io,baylee-d/osf.io,amyshi188/osf.io,aaxelb/osf.io,crcresearch/osf.io,fabianvf/osf.io,CenterForOpenScience/osf.io,arpitar/osf.io,mfraezz/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,doublebits/osf.io,emetsger/osf.io,hmoco/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,kwierman/osf.io,ticklemepierce/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,emetsger/osf.io,jinluyuan/osf.io,HarryRybacki/osf.io,leb2dg/osf.io,danielneis/osf.io,jolene-esposito/osf.io,saradbowman/osf.io,chennan47/osf.io,caseyrollins/osf.io,jmcarp/osf.io,zachjanicki/osf.io,leb2dg/osf.io,crcresearch/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,amyshi188/osf.io,ckc6cz/osf.io,Johnetordoff/osf.io,doublebits/osf.io,TomBaxter/osf.io,samanehsan/osf.io,felliott/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,pattisdr/osf.io,chenn
an47/osf.io,chrisseto/osf.io,dplorimer/osf,mfraezz/osf.io,GageGaskins/osf.io,cosenal/osf.io,samanehsan/osf.io,danielneis/osf.io,icereval/osf.io,sbt9uc/osf.io,rdhyee/osf.io,bdyetton/prettychart,KAsante95/osf.io,petermalcolm/osf.io,billyhunt/osf.io,cslzchen/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,Ghalko/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,felliott/osf.io,crcresearch/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,HarryRybacki/osf.io,njantrania/osf.io,fabianvf/osf.io,icereval/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,Nesiehr/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,haoyuchen1992/osf.io,reinaH/osf.io,cldershem/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,binoculars/osf.io,Ghalko/osf.io,samchrisinger/osf.io,alexschiller/osf.io,danielneis/osf.io,cosenal/osf.io,acshi/osf.io,Nesiehr/osf.io,erinspace/osf.io,Ghalko/osf.io,wearpants/osf.io,petermalcolm/osf.io,acshi/osf.io,abought/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,wearpants/osf.io,arpitar/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,samanehsan/osf.io,bdyetton/prettychart,caseyrygt/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,sloria/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,leb2dg/osf.io,KAsante95/osf.io,abought/osf.io,laurenrevere/osf.io,Ghalko/osf.io,laurenrevere/osf.io,MerlinZhang/osf.io,GageGaskins/osf.io,binoculars/osf.io,cldershem/osf.io,caseyrygt/osf.io | framework/tasks/handlers.py | framework/tasks/handlers.py | # -*- coding: utf-8 -*-
import logging
import functools
from flask import g
from celery import group
from website import settings
logger = logging.getLogger(__name__)
def celery_before_request():
    """Start a fresh per-request list of queued Celery task signatures."""
    g._celery_tasks = []
def celery_teardown_request(error=None):
    """Dispatch all task signatures queued during the request as one group.

    Nothing is dispatched when the request ended with an error.
    """
    if error is not None:
        return
    try:
        tasks = g._celery_tasks
        if tasks:
            group(iter(tasks)).apply_async()
    except AttributeError:
        # celery_before_request never ran, so no queue was initialized.
        if not settings.DEBUG_MODE:
            logger.error('Task queue not initialized')
def enqueue_task(signature):
    """If working in a request context, push task signature to ``g`` to run
    after request is complete; else run signature immediately.

    :param signature: Celery task signature
    """
    try:
        # Queue each distinct signature at most once per request.
        if signature not in g._celery_tasks:
            g._celery_tasks.append(signature)
    except RuntimeError:
        # Outside a request context ``g`` is unusable; run synchronously.
        signature()
def queued_task(task):
    """Decorator that queues the wrapped Celery task on ``g`` when Celery
    is enabled, and runs it synchronously otherwise.  Apply it to any task
    fired inside a request context that may write to the database, to
    avoid race conditions.
    """
    @functools.wraps(task)
    def wrapped(*args, **kwargs):
        if not settings.USE_CELERY:
            task(*args, **kwargs)
            return
        enqueue_task(task.si(*args, **kwargs))
    return wrapped
# Request lifecycle hooks exported to the web framework.
handlers = {
    'before_request': celery_before_request,
    'teardown_request': celery_teardown_request,
}
| # -*- coding: utf-8 -*-
import logging
import functools
from flask import g
from celery import group
from website import settings
logger = logging.getLogger(__name__)
def celery_before_request():
g._celery_tasks = []
def celery_teardown_request(error=None):
    """Dispatch all task signatures queued during the request as one group.

    Nothing is dispatched when the request ended with an error.
    """
    if error is not None:
        return
    try:
        tasks = g._celery_tasks
        if tasks:
            # Python lists have no ``.iter()`` method; the builtin iter()
            # produces the iterator the celery group expects.
            group(iter(tasks)).apply_async()
    except AttributeError:
        # celery_before_request never ran, so no queue was initialized.
        if not settings.DEBUG_MODE:
            logger.error('Task queue not initialized')
def enqueue_task(signature):
"""If working in a request context, push task signature to ``g`` to run
after request is complete; else run signature immediately.
:param signature: Celery task signature
"""
try:
if signature not in g._celery_tasks:
g._celery_tasks.append(signature)
except RuntimeError:
signature()
def queued_task(task):
"""Decorator that adds the wrapped task to the queue on ``g`` if Celery is
enabled, else runs the task synchronously. Can only be applied to Celery
tasks; should be used for all tasks fired within a request context that
may write to the database to avoid race conditions.
"""
@functools.wraps(task)
def wrapped(*args, **kwargs):
if settings.USE_CELERY:
signature = task.si(*args, **kwargs)
enqueue_task(signature)
else:
task(*args, **kwargs)
return wrapped
handlers = {
'before_request': celery_before_request,
'teardown_request': celery_teardown_request,
}
| apache-2.0 | Python |
9e2f9b040d0dde3237daca1c483c8b2bf0170663 | Update Arch package to 2.7 | bowlofstew/packages,biicode/packages,biicode/packages,bowlofstew/packages | archlinux/archpack_settings.py | archlinux/archpack_settings.py | #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| bsd-2-clause | Python |
ecd33e00eb5eb8ff58358e01a6d618262e8381a6 | Update upstream version of vo | larrybradley/astropy,MSeifert04/astropy,tbabej/astropy,dhomeier/astropy,tbabej/astropy,StuartLittlefair/astropy,mhvk/astropy,astropy/astropy,lpsinger/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,dhomeier/astropy,joergdietrich/astropy,MSeifert04/astropy,stargaser/astropy,pllim/astropy,StuartLittlefair/astropy,larrybradley/astropy,saimn/astropy,AustereCuriosity/astropy,DougBurke/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,funbaker/astropy,stargaser/astropy,kelle/astropy,joergdietrich/astropy,joergdietrich/astropy,joergdietrich/astropy,AustereCuriosity/astropy,dhomeier/astropy,AustereCuriosity/astropy,mhvk/astropy,astropy/astropy,saimn/astropy,saimn/astropy,funbaker/astropy,bsipocz/astropy,dhomeier/astropy,saimn/astropy,aleksandr-bakanov/astropy,kelle/astropy,pllim/astropy,bsipocz/astropy,tbabej/astropy,lpsinger/astropy,bsipocz/astropy,DougBurke/astropy,MSeifert04/astropy,larrybradley/astropy,pllim/astropy,kelle/astropy,stargaser/astropy,pllim/astropy,astropy/astropy,larrybradley/astropy,bsipocz/astropy,kelle/astropy,aleksandr-bakanov/astropy,mhvk/astropy,funbaker/astropy,kelle/astropy,DougBurke/astropy,tbabej/astropy,joergdietrich/astropy,astropy/astropy,dhomeier/astropy,saimn/astropy,MSeifert04/astropy,tbabej/astropy,funbaker/astropy,astropy/astropy,stargaser/astropy,lpsinger/astropy,lpsinger/astropy,larrybradley/astropy,mhvk/astropy,pllim/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,lpsinger/astropy,mhvk/astropy | astropy/io/vo/setup_package.py | astropy/io/vo/setup_package.py | from distutils.core import Extension
from os.path import join
from astropy import setup_helpers
def get_extensions(build_type='release'):
VO_DIR = 'astropy/io/vo/src'
return [Extension(
"astropy.io.vo.tablewriter",
[join(VO_DIR, "tablewriter.c")],
include_dirs=[VO_DIR])]
def get_package_data():
return {
'astropy.io.vo': [
'data/ucd1p-words.txt', 'data/*.xsd', 'data/*.dtd'],
'astropy.io.vo.tests': [
'data/*.xml', 'data/*.gz', 'data/*.json', 'data/*.fits',
'data/*.txt'],
'astropy.io.vo.validator': [
'urls/*.dat.gz']}
def get_legacy_alias():
return setup_helpers.add_legacy_alias(
'vo', 'astropy.io.vo', '0.8')
| from distutils.core import Extension
from os.path import join
from astropy import setup_helpers
def get_extensions(build_type='release'):
VO_DIR = 'astropy/io/vo/src'
return [Extension(
"astropy.io.vo.tablewriter",
[join(VO_DIR, "tablewriter.c")],
include_dirs=[VO_DIR])]
def get_package_data():
return {
'astropy.io.vo': [
'data/ucd1p-words.txt', 'data/*.xsd', 'data/*.dtd'],
'astropy.io.vo.tests': [
'data/*.xml', 'data/*.gz', 'data/*.json', 'data/*.fits',
'data/*.txt'],
'astropy.io.vo.validator': [
'urls/*.dat.gz']}
def get_legacy_alias():
return setup_helpers.add_legacy_alias(
'vo', 'astropy.io.vo', '0.7.2')
| bsd-3-clause | Python |
dc7ac28109609e2a90856dbaf01ae8bbb2fd6985 | Repair the test (adding a docstring to the module type changed the docstring for an uninitialized module object). | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_module.py | Lib/test/test_module.py | # Test the module type
from test_support import verify, vereq, verbose, TestFailed
import sys
module = type(sys)
# An uninitialized module has no __dict__ or __name__, and __doc__ is None
foo = module.__new__(module)
verify(foo.__dict__ is None)
try:
s = foo.__name__
except AttributeError:
pass
else:
raise TestFailed, "__name__ = %s" % repr(s)
vereq(foo.__doc__, module.__doc__)
# Regularly initialized module, no docstring
foo = module("foo")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, None)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": None})
# ASCII docstring
foo = module("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc"})
# Unicode docstring
foo = module("foo", u"foodoc\u1234")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, u"foodoc\u1234")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": u"foodoc\u1234"})
# Reinitialization should not replace the __dict__
foo.bar = 42
d = foo.__dict__
foo.__init__("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.bar, 42)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc", "bar": 42})
verify(foo.__dict__ is d)
if verbose:
print "All OK"
| # Test the module type
from test_support import verify, vereq, verbose, TestFailed
import sys
module = type(sys)
# An uninitialized module has no __dict__ or __name__, and __doc__ is None
foo = module.__new__(module)
verify(foo.__dict__ is None)
try:
s = foo.__name__
except AttributeError:
pass
else:
raise TestFailed, "__name__ = %s" % repr(s)
vereq(foo.__doc__, None)
# Regularly initialized module, no docstring
foo = module("foo")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, None)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": None})
# ASCII docstring
foo = module("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc"})
# Unicode docstring
foo = module("foo", u"foodoc\u1234")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, u"foodoc\u1234")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": u"foodoc\u1234"})
# Reinitialization should not replace the __dict__
foo.bar = 42
d = foo.__dict__
foo.__init__("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.bar, 42)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc", "bar": 42})
verify(foo.__dict__ is d)
if verbose:
print "All OK"
| mit | Python |
5abac5e7cdc1d67ec6ed0996a5b132fae20af530 | Use the URLs input in the UI boxes | scraperwiki/difference-tool,scraperwiki/difference-tool,scraperwiki/difference-tool,scraperwiki/difference-tool | compare_text_of_urls.py | compare_text_of_urls.py | #!/usr/bin/env python
from __future__ import print_function
import json
import os
from os.path import join, dirname, abspath
import subprocess
import sys
from get_text_from_url import process_page
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
# Enter two URLs with a space between them
if len(arg) > 0:
# Developers can supply URL as an argument...
urls = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings_json:
settings = json.load(settings_json)
url1 = settings['source-url']
url2 = settings['source-url2']
assert url1 and url2, 'Two URLs not entered.'
diff_urls(url1, url2)
def diff_urls(url1, url2):
text1 = process_page('text_from_url1', url1)
text2 = process_page('text_from_url2', url2)
subprocess.check_output("./diff_text.sh", cwd=dirname(abspath(__file__)))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from __future__ import print_function
import json
import os
from os.path import join, dirname, abspath
import subprocess
import sys
from get_text_from_url import process_page
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
# Enter two URLs with a space between them
if len(arg) > 0:
# Developers can supply URL as an argument...
urls = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
urls = json.load(settings)['source-url']
parsed_urls = urls.strip().split(' ')
assert len(parsed_urls) == 2, 'Two URLs not entered.'
diff_urls(parsed_urls[0], parsed_urls[1])
def diff_urls(url1, url2):
text1 = process_page('text_from_url1', url1)
text2 = process_page('text_from_url2', url2)
subprocess.check_output("./diff_text.sh", cwd=dirname(abspath(__file__)))
if __name__ == '__main__':
main()
| agpl-3.0 | Python |
f81c36d4fe31815ed6692b573ad660067151d215 | Drop use of 'oslo' namespace package | openstack/python-zaqarclient | zaqarclient/_i18n.py | zaqarclient/_i18n.py | # Copyright 2014 Red Hat, Inc
# All Rights .Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_i18n import * # noqa
_translators = TranslatorFactory(domain='zaqarclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
| # Copyright 2014 Red Hat, Inc
# All Rights .Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.i18n import * # noqa
_translators = TranslatorFactory(domain='zaqarclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
| apache-2.0 | Python |