code
stringlengths 10
805k
| def_use_chains
sequencelengths 0
667
|
---|---|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# #*** <License> ************************************************************#
# This module is part of the repository CNDB.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#
from _TFL.pyk import pyk
from rsclib.HTML_Parse import tag, Page_Tree
from rsclib.autosuper import autosuper
from spider.common import Interface, Inet4, Inet6, unroutable
from spider.common import WLAN_Config
from spider.luci import Version_Mixin
class Status (Page_Tree, Version_Mixin) :
url = 'cgi-bin/luci/freifunk/status/status'
retries = 2
timeout = 10
html_charset = 'utf-8' # force utf-8 encoding
wl_names = dict \
( ssid = 'ssid'
, _bsiid = 'bssid'
, channel = 'channel'
, mode = 'mode'
)
def parse (self) :
root = self.tree.getroot ()
self.wlans = []
self.routes = {}
for div in root.findall (".//%s" % tag ("div")) :
id = div.get ('id')
if id == 'cbi-wireless' :
wlan_div = div
elif id == 'cbi-routes' :
route_div = div
self.try_get_version (div)
for d in self.tbl_iter (wlan_div) :
for k, newkey in pyk.iteritems (self.wl_names) :
if k in d :
d [newkey] = d [k]
wl = WLAN_Config (** d)
self.wlans.append (wl)
for d in self.tbl_iter (route_div) :
iface = d.get ('iface')
gw = d.get ('gateway')
if iface and gw :
self.routes [iface] = gw
self.set_version (root)
# end def parse
def tbl_iter (self, div) :
tbl = div.find (".//%s" % tag ("table"))
assert tbl.get ('class') == 'cbi-section-table'
d = {}
for tr in tbl :
if 'cbi-section-table-row' not in tr.get ('class').split () :
continue
for input in tr.findall (".//%s" % tag ('input')) :
name = input.get ('id').split ('.') [-1]
val = input.get ('value')
d [name] = val
if not d :
continue
yield d
# end def tbl_iter
# end class Status
class Table_Iter (Page_Tree) :
def table_iter (self) :
root = self.tree.getroot ()
for div in root.findall (".//%s" % tag ("div")) :
if div.get ('id') == 'maincontent' :
break
tbl = div.find (".//%s" % tag ("table"))
if tbl is None :
return
for tr in tbl :
if tr [0].tag == tag ('th') :
continue
yield (self.tree.get_text (x) for x in tr)
# end def table_iter
# end class Table_Iter
class OLSR_Connections (Table_Iter) :
url = 'cgi-bin/luci/freifunk/olsr/'
retries = 2
timeout = 10
html_charset = 'utf-8' # force utf-8 encoding
def parse (self) :
self.neighbors = {}
for l in self.table_iter () :
neighbor, ip, lq, nlq, etx = l
lq, nlq, etx = (float (x) for x in (lq, nlq, etx))
self.neighbors [neighbor] = [ip, lq, nlq, etx]
# end def parse
# end class OLSR_Connections
class OLSR_Routes (Table_Iter) :
url = 'cgi-bin/luci/freifunk/olsr/routes'
retries = 2
timeout = 10
html_charset = 'utf-8' # force utf-8 encoding
def parse (self) :
self.iface_by_gw = {}
for l in self.table_iter () :
announced, gw, iface, metric, etx = l
if gw in self.iface_by_gw :
assert iface == self.iface_by_gw [gw]
else :
self.iface_by_gw [gw] = iface
# end def parse
# end class OLSR_Routes
class OpenWRT (autosuper) :
def __init__ (self, site, request) :
self.site = site
self.request = request
if 'interfaces' in self.request or 'ips' in self.request :
st = Status (site = site)
conn = OLSR_Connections (site = site)
route = OLSR_Routes (site = site)
self.version = st.version
assert len (st.wlans) <= 1
interfaces = {}
ips = {}
count = 0
for gw, ifname in pyk.iteritems (route.iface_by_gw) :
ip, lq, nlq, etx = conn.neighbors [gw]
i4 = Inet4 (ip, None, None, iface = ifname)
ips [i4] = 1
is_wlan = True
if lq == nlq == etx == 1.0 :
is_wlan = False
if ifname in interfaces :
iface = interfaces [ifname]
if not iface.is_wlan and is_wlan :
iface.is_wlan = True
iface.wlan_info = st.wlans [0]
else :
iface = Interface (count, ifname, None)
iface.is_wlan = is_wlan
if is_wlan :
iface.wlan_info = st.wlans [0]
count += 1
interfaces [ifname] = iface
if i4 not in iface.inet4 :
iface.append_inet4 (i4)
wl_if = None
for iface in pyk.itervalues (interfaces) :
if iface.is_wlan :
if wl_if :
m = "Duplicate wlan: %s/%s" % (iface.name, wl_if.name)
raise ValueError (m)
wl_if = iface
# check own ip
n = 'unknown'
i4 = Inet4 (self.request ['ip'], None, None, iface = n)
if i4 not in ips :
assert n not in interfaces
iface = interfaces [n] = Interface (count, n, None)
iface.append_inet4 (i4)
iface.is_wlan = False
if not wl_if and st.wlans :
iface.is_wlan = True
iface.wlan_info = st.wlans [0]
ips [i4] = True
self.request ['ips'] = ips
self.request ['interfaces'] = interfaces
self.request ['version'] = st.version
# end def __init__
# end class OpenWRT
| [
[
[
400,
403
],
[
1447,
1450
],
[
4508,
4511
],
[
5479,
5482
]
],
[
[
438,
441
],
[
1149,
1152
],
[
1925,
1928
],
[
2181,
2184
],
[
2581,
2584
],
[
2701,
2704
],
[
2813,
2816
]
],
[
[
443,
452
],
[
672,
681
],
[
2459,
2468
]
],
[
[
486,
495
],
[
3985,
3994
]
],
[
[
529,
538
],
[
5099,
5108
],
[
5970,
5979
]
],
[
[
540,
545
],
[
4621,
4626
],
[
5804,
5809
]
],
[
[
547,
552
]
],
[
[
554,
564
]
],
[
[
598,
609
],
[
1563,
1574
]
],
[
[
643,
656
],
[
683,
696
]
],
[
[
664,
670
],
[
4186,
4192
]
],
[
[
2447,
2457
],
[
2980,
2990
],
[
3461,
3471
]
],
[
[
2962,
2978
],
[
4237,
4253
]
],
[
[
3448,
3459
],
[
4288,
4299
]
],
[
[
3976,
3983
]
]
] |
# UCF Senior Design 2017-18
# Group 38
from PIL import Image
import cv2
import imagehash
import math
import numpy as np
DIFF_THRES = 20
LIMIT = 2
RESIZE = 1000
def calc_hash(img):
"""
Calculate the wavelet hash of the image
img: (ndarray) image file
"""
# resize image if height > 1000
img = resize(img)
return imagehash.whash(Image.fromarray(img))
def compare(hash1, hash2):
"""
Calculate the difference between two images
hash1: (array) first wavelet hash
hash2: (array) second wavelet hash
"""
return hash1 - hash2
def limit(img, std_hash, count):
"""
Determine whether image should be removed from image dictionary in main.py
img: (ndarray) image file
std_hash: (array) wavelet hash of comparison standard
count: (int) global count of images similar to comparison standard
"""
# calculate hash for given image
cmp_hash = calc_hash(img)
# compare to standard
diff = compare(std_hash, cmp_hash)
# image is similar to standard
if diff <= DIFF_THRES:
# if there are 3 similar images already, remove image
if count >= LIMIT:
return 'remove'
# non-similar image found
else:
# update comparison standard
return 'update_std'
# else continue reading images with same standard
return 'continue'
def resize(img):
"""
Resize an image
img: (ndarray) RGB color image
"""
# get dimensions of image
width = np.shape(img)[1]
height = np.shape(img)[0]
# if height of image is greater than 1000, resize it to 1000
if width > RESIZE:
# keep resize proportional
scale = RESIZE / width
resized_img = cv2.resize(
img, (RESIZE, math.floor(height / scale)), cv2.INTER_AREA)
# return resized image
return resized_img
# if height of image is less than 1000, return image unresized
return img
def set_standard(images, filename):
"""
Set new comparison standard and update information
images: (dictionary) dictionary containing all the image data
filename: (String) name of the image file
"""
return filename, calc_hash(images[filename]), 0
| [
[
[
56,
61
],
[
363,
368
]
],
[
[
69,
72
],
[
1745,
1748
],
[
1812,
1815
]
],
[
[
80,
89
],
[
347,
356
]
],
[
[
97,
101
],
[
1783,
1787
]
],
[
[
109,
120
],
[
1521,
1523
],
[
1551,
1553
]
],
[
[
122,
132
],
[
1073,
1083
]
],
[
[
138,
143
],
[
1167,
1172
]
],
[
[
148,
154
],
[
1649,
1655
],
[
1708,
1714
],
[
1775,
1781
]
],
[
[
168,
177
],
[
941,
950
],
[
2219,
2228
]
],
[
[
391,
398
],
[
994,
1001
]
],
[
[
594,
599
]
],
[
[
1391,
1397
],
[
324,
330
]
],
[
[
1975,
1987
]
]
] |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cli.cli import main
# TODO(hongyes): add more commands:
# kfp compile (migrate from dsl-compile)
# kfp experiment (manage experiments)
if __name__ == '__main__':
main()
| [
[
[
597,
601
],
[
750,
754
]
]
] |
import time
from PyQt5 import QtGui, QtCore
from ui.room_item import Ui_Form
from PyQt5.QtWidgets import QWidget
class Room_Item(QWidget,Ui_Form):
def __init__(self,parent=None,room_data=None):
super(Room_Item,self).__init__(parent)
self.setupUi(self)
self.data = room_data
self.setRoomInfo()
def setRoomInfo(self):
self.room_name.setText('{}({})'.format(self.data['naturalName'], self.data['roomName']))
self.description.setText("<a style='color:#BCBCBC'>{}</a>".format(self.data['description']))
timeStamp = int(self.data['creationDate']) / 1000
timeArray = time.localtime(timeStamp)
otherStyleTime = time.strftime("%Y-%m-%d", timeArray)
self.create_time.setText("<a style='color:#BCBCBC'>{}</a>".format(otherStyleTime))
members = len(self.data['owners']) + len(self.data['admins']) + len(self.data['members'])
memberCounter = "<a style='color:#BCBCBC'>{}/{}</a>".format(members, ('∞' if self.data['maxUsers']==0 else self.data['maxUsers']))
self.member.setText(memberCounter) | [
[
[
7,
11
],
[
636,
640
],
[
687,
691
]
],
[
[
31,
36
]
],
[
[
38,
44
]
],
[
[
71,
78
],
[
140,
147
]
],
[
[
107,
114
],
[
132,
139
]
],
[
[
122,
131
],
[
215,
224
]
]
] |
import asyncio
import re
import sys
import traceback
import toga
from toga import Key
from .keys import toga_to_winforms_key
from .libs import Threading, WinForms, shcore, user32, win_version
from .libs.proactor import WinformsProactorEventLoop
from .window import Window
class MainWindow(Window):
def winforms_FormClosing(self, sender, event):
if not self.interface.app._impl._is_exiting:
event.Cancel = not self.interface.app.exit()
class App:
_MAIN_WINDOW_CLASS = MainWindow
def __init__(self, interface):
self.interface = interface
self.interface._impl = self
# Winforms app exit is tightly bound to the close of the MainWindow.
# The FormClosing message on MainWindow calls app.exit(), which
# will then trigger the "on_exit" handler (which might abort the
# close). However, if app.exit() succeeds, it will request the
# Main Window to close... which calls app.exit().
# So - we have a flag that is only ever sent once a request has been
# made to exit the native app. This flag can be used to shortcut any
# window-level close handling.
self._is_exiting = False
self.loop = WinformsProactorEventLoop()
asyncio.set_event_loop(self.loop)
def create(self):
self.native = WinForms.Application
self.app_context = WinForms.ApplicationContext()
# Check the version of windows and make sure we are setting the DPI mode
# with the most up to date API
# Windows Versioning Check Sources : https://www.lifewire.com/windows-version-numbers-2625171
# and https://docs.microsoft.com/en-us/windows/release-information/
if win_version.Major >= 6: # Checks for Windows Vista or later
# Represents Windows 8.1 up to Windows 10 before Build 1703 which should use
# SetProcessDpiAwareness(True)
if ((win_version.Major == 6 and win_version.Minor == 3) or
(win_version.Major == 10 and win_version.Build < 15063)):
shcore.SetProcessDpiAwareness(True)
# Represents Windows 10 Build 1703 and beyond which should use
# SetProcessDpiAwarenessContext(-2)
elif win_version.Major == 10 and win_version.Build >= 15063:
user32.SetProcessDpiAwarenessContext(-2)
# Any other version of windows should use SetProcessDPIAware()
else:
user32.SetProcessDPIAware()
self.native.EnableVisualStyles()
self.native.SetCompatibleTextRenderingDefault(False)
self.interface.commands.add(
toga.Command(
lambda _: self.interface.about(),
'About {}'.format(self.interface.name),
group=toga.Group.HELP
),
toga.Command(None, 'Preferences', group=toga.Group.FILE),
# Quit should always be the last item, in a section on it's own
toga.Command(
lambda _: self.interface.exit(),
'Exit ' + self.interface.name,
shortcut=Key.MOD_1 + 'q',
group=toga.Group.FILE,
section=sys.maxsize
),
toga.Command(
lambda _: self.interface.visit_homepage(),
'Visit homepage',
enabled=self.interface.home_page is not None,
group=toga.Group.HELP
)
)
self._create_app_commands()
# Call user code to populate the main window
self.interface.startup()
self.create_menus()
self.interface.icon.bind(self.interface.factory)
self.interface.main_window._impl.set_app(self)
def create_menus(self):
self._menu_items = {}
self._menu_groups = {}
toga.Group.FILE.order = 0
menubar = WinForms.MenuStrip()
submenu = None
for cmd in self.interface.commands:
if cmd == toga.GROUP_BREAK:
submenu = None
elif cmd == toga.SECTION_BREAK:
submenu.DropDownItems.Add('-')
else:
submenu = self._submenu(cmd.group, menubar)
item = WinForms.ToolStripMenuItem(cmd.label)
if cmd.action:
item.Click += cmd._impl.as_handler()
item.Enabled = cmd.enabled
if cmd.shortcut is not None:
shortcut_keys = toga_to_winforms_key(cmd.shortcut)
item.ShortcutKeys = shortcut_keys
item.ShowShortcutKeys = True
cmd._impl.native.append(item)
self._menu_items[item] = cmd
submenu.DropDownItems.Add(item)
self.interface.main_window._impl.native.Controls.Add(menubar)
self.interface.main_window._impl.native.MainMenuStrip = menubar
self.interface.main_window.content.refresh()
def _submenu(self, group, menubar):
try:
return self._menu_groups[group]
except KeyError:
if group is None:
submenu = menubar
else:
parent_menu = self._submenu(group.parent, menubar)
submenu = WinForms.ToolStripMenuItem(group.label)
# Top level menus are added in a different way to submenus
if group.parent is None:
parent_menu.Items.Add(submenu)
else:
parent_menu.DropDownItems.Add(submenu)
self._menu_groups[group] = submenu
return submenu
def _create_app_commands(self):
# No extra menus
pass
def open_document(self, fileURL):
'''Add a new document to this app.'''
print("STUB: If you want to handle opening documents, implement App.open_document(fileURL)")
def winforms_thread_exception(self, sender, winforms_exc):
# The PythonException returned by Winforms doesn't give us
# easy access to the underlying Python stacktrace; so we
# reconstruct it from the string message.
# The Python message is helpfully included in square brackets,
# as the context for the first line in the .net stack trace.
# So, look for the closing bracket and the start of the Python.net
# stack trace. Then, reconstruct the line breaks internal to the
# remaining string.
print("Traceback (most recent call last):")
py_exc = winforms_exc.get_Exception()
full_stack_trace = py_exc.StackTrace
regex = re.compile(
r"^\[(?:'(.*?)', )*(?:'(.*?)')\] (?:.*?) Python\.Runtime",
re.DOTALL | re.UNICODE
)
stacktrace_relevant_lines = regex.findall(full_stack_trace)
if len(stacktrace_relevant_lines) == 0:
self.print_stack_trace(full_stack_trace)
else:
for lines in stacktrace_relevant_lines:
for line in lines:
self.print_stack_trace(line)
print(py_exc.Message)
@classmethod
def print_stack_trace(cls, stack_trace_line):
for level in stack_trace_line.split("', '"):
for line in level.split("\\n"):
if line:
print(line)
def run_app(self):
try:
self.create()
self.native.ThreadException += self.winforms_thread_exception
self.loop.run_forever(self.app_context)
except: # NOQA
traceback.print_exc()
def main_loop(self):
thread = Threading.Thread(Threading.ThreadStart(self.run_app))
thread.SetApartmentState(Threading.ApartmentState.STA)
thread.Start()
thread.Join()
def show_about_dialog(self):
message_parts = []
if self.interface.name is not None:
if self.interface.version is not None:
message_parts.append(
"{name} v{version}".format(
name=self.interface.name,
version=self.interface.version,
)
)
else:
message_parts.append(
"{name}".format(name=self.interface.name)
)
elif self.interface.version is not None:
message_parts.append(
"v{version}".format(version=self.interface.version)
)
if self.interface.author is not None:
message_parts.append(
"Author: {author}".format(author=self.interface.author)
)
if self.interface.description is not None:
message_parts.append(
"\n{description}".format(
description=self.interface.description
)
)
self.interface.main_window.info_dialog(
'About {}'.format(self.interface.name), "\n".join(message_parts)
)
def exit(self):
self._is_exiting = True
self.native.Exit()
def set_main_window(self, window):
self.app_context.MainForm = window._impl.native
def set_on_exit(self, value):
pass
def current_window(self):
self.interface.factory.not_implemented('App.current_window()')
def enter_full_screen(self, windows):
self.interface.factory.not_implemented('App.enter_full_screen()')
def exit_full_screen(self, windows):
self.interface.factory.not_implemented('App.exit_full_screen()')
def set_cursor(self, value):
self.interface.factory.not_implemented('App.set_cursor()')
def show_cursor(self):
self.interface.factory.not_implemented('App.show_cursor()')
def hide_cursor(self):
self.interface.factory.not_implemented('App.hide_cursor()')
def add_background_task(self, handler):
self.loop.call_soon(handler, self)
class DocumentApp(App):
def _create_app_commands(self):
self.interface.commands.add(
toga.Command(
lambda w: self.open_file,
label='Open...',
shortcut=Key.MOD_1 + 'o',
group=toga.Group.FILE,
section=0
),
)
def open_document(self, fileURL):
"""Open a new document in this app.
Args:
fileURL (str): The URL/path to the file to add as a document.
"""
self.interface.factory.not_implemented('DocumentApp.open_document()')
| [
[
[
7,
14
],
[
1254,
1261
]
],
[
[
22,
24
],
[
6600,
6602
],
[
6697,
6699
],
[
6709,
6711
]
],
[
[
32,
35
],
[
3204,
3207
]
],
[
[
43,
52
],
[
7529,
7538
]
],
[
[
61,
65
],
[
2658,
2662
],
[
2800,
2804
],
[
2843,
2847
],
[
2883,
2887
],
[
2989,
2993
],
[
3163,
3167
],
[
3243,
3247
],
[
3434,
3438
],
[
3836,
3840
],
[
3990,
3994
],
[
4063,
4067
],
[
10015,
10019
],
[
10168,
10172
]
],
[
[
83,
86
],
[
3124,
3127
],
[
10129,
10132
]
],
[
[
105,
125
],
[
4484,
4504
]
],
[
[
145,
154
],
[
7594,
7603
],
[
7611,
7620
],
[
7681,
7690
]
],
[
[
156,
164
],
[
1333,
1341
],
[
1381,
1389
],
[
3880,
3888
],
[
4232,
4240
],
[
5258,
5266
]
],
[
[
166,
172
],
[
2079,
2085
]
],
[
[
174,
180
],
[
2327,
2333
],
[
2477,
2483
]
],
[
[
182,
193
],
[
1721,
1732
],
[
1931,
1942
],
[
1958,
1969
],
[
2006,
2017
],
[
2034,
2045
],
[
2255,
2266
],
[
2283,
2294
]
],
[
[
221,
246
],
[
1218,
1243
]
],
[
[
267,
273
],
[
293,
299
]
],
[
[
282,
292
],
[
501,
511
]
],
[
[
471,
474
],
[
9924,
9927
]
],
[
[
9912,
9923
]
]
] |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
SimplePhotogrammetryRoutePlanner
A QGIS plugin
A imple photogrammetry route planner.
Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/
-------------------
begin : 2021-04-24
copyright : (C) 2021 by Xiangyong Luo
email : [email protected]
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
__version__ = "0.4.0"
# noinspection PyPep8Naming
def classFactory(iface): # pylint: disable=invalid-name
"""Load SimplePhotogrammetryRoutePlanner class from file SimplePhotogrammetryRoutePlanner.
:param iface: A QGIS interface instance.
:type iface: QgsInterface
"""
#
from .SimplePhotogrammetryRoutePlanner import SimplePhotogrammetryRoutePlanner
return SimplePhotogrammetryRoutePlanner(iface)
| [
[
[
1304,
1315
]
],
[
[
1359,
1371
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 24 14:38:20 2018
dimension reduction with VarianceThreshold using sklearn.
Feature selector that removes all low-variance features.
@author: lenovo
"""
from sklearn.feature_selection import VarianceThreshold
import numpy as np
#
np.random.seed(1)
X = np.random.randn(100, 10)
X = np.hstack([X, np.zeros([100, 5])])
#
def featureSelection_variance(X, thrd):
sel = VarianceThreshold(threshold=thrd)
X_selected = sel.fit_transform(X)
mask = sel.get_support()
return X_selected, mask
X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
selector = VarianceThreshold()
selector.fit_transform(X)
selector.variances_
| [
[
[
237,
254
],
[
605,
622
],
[
416,
433
]
],
[
[
262,
273
],
[
276,
278
],
[
298,
300
],
[
327,
329
],
[
341,
343
]
],
[
[
294,
295
],
[
338,
339
]
],
[
[
323,
324
]
],
[
[
370,
395
]
],
[
[
547,
548
],
[
648,
649
]
],
[
[
594,
602
],
[
625,
633
],
[
651,
659
]
]
] |
#!/usr/bin/env python
# coding=utf-8
from my_multi_main3 import main
import numpy as np
import argparse
import time
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--save-model', action='store_true', default=False,
help='For Saving the current Model')
parser.add_argument('--norm-flag', type=bool, default=False,
help='Triggering the Layer Normalization flag for attention scores')
parser.add_argument('--gamma', type=float, default=None,
help='Controlling the sparisty of gfusedmax/sparsemax, the smaller, the more sparse')
parser.add_argument('--lam', type=float, default=1.0,
help='Lambda: Controlling the smoothness of gfusedmax, the larger, the smoother')
parser.add_argument('--max-type', type=str, default='softmax',choices=['softmax','sparsemax','gfusedmax'],
help='mapping function in attention')
parser.add_argument('--optim-type', type=str, default='SGD',choices=['SGD','Adam'],
help='mapping function in attention')
parser.add_argument('--head-cnt', type=int, default=2, metavar='S', choices=[1,2,4,5,10],
help='Number of heads for attention (default: 1)')
args = parser.parse_args()
hyperparameter_choices = {
'lr':list(10**np.arange(-4,-1,0.5)),
'norm_flag': [True,False],
'gamma':list(10**np.arange(-1,3,0.5))+[None,],
'lam':list(10**np.arange(-2,2,0.5)),
'max_type':['softmax','sparsemax','gfusedmax'],
# 'max_type':['sparsemax'],
'optim_type':['SGD','Adam'],
'head_cnt':[1,2,4,5,10,20]
}
param_num = 25
record = np.zeros([param_num,len(hyperparameter_choices)+1])
record_name = 'record3_multi_%s.csv'%time.strftime('%Y-%m-%d_%H-%M-%S',time.localtime())
for n in range(param_num):
for param_index,(k,v) in enumerate(hyperparameter_choices.items()):
print(param_index,k)
value_index = np.random.choice(len(v))
if isinstance(v[value_index],str) or isinstance(v[value_index],bool) or v[value_index] is None:
record[n,param_index] = value_index
else:
record[n,param_index] = v[value_index]
setattr(args,k,v[value_index])
record[n,-1] = main(args)
np.savetxt(record_name, record, delimiter=',')
| [
[
[
65,
69
],
[
3317,
3321
]
],
[
[
77,
88
],
[
2405,
2407
],
[
2480,
2482
],
[
2529,
2531
],
[
2726,
2728
],
[
3017,
3019
],
[
3332,
3334
]
],
[
[
96,
104
],
[
127,
135
]
],
[
[
112,
116
],
[
2815,
2819
],
[
2849,
2853
]
],
[
[
118,
124
],
[
188,
194
],
[
331,
337
],
[
482,
488
],
[
617,
623
],
[
743,
749
],
[
871,
877
],
[
991,
997
],
[
1108,
1114
],
[
1265,
1271
],
[
1394,
1400
],
[
1544,
1550
],
[
1707,
1713
],
[
1863,
1869
],
[
2028,
2034
],
[
2170,
2176
],
[
2339,
2345
]
],
[
[
2332,
2336
],
[
3275,
3279
],
[
3322,
3326
]
],
[
[
2360,
2382
],
[
2750,
2772
],
[
2933,
2955
]
],
[
[
2702,
2711
],
[
2736,
2745
],
[
2882,
2891
]
],
[
[
2717,
2723
],
[
3158,
3164
],
[
3220,
3226
],
[
3302,
3308
],
[
3356,
3362
]
],
[
[
2778,
2789
],
[
3343,
3354
]
],
[
[
2871,
2872
],
[
3165,
3166
],
[
3227,
3228
],
[
3309,
3310
]
],
[
[
2902,
2913
],
[
2980,
2991
],
[
3167,
3178
],
[
3229,
3240
]
],
[
[
2915,
2916
],
[
2992,
2993
],
[
3280,
3281
]
],
[
[
2917,
2918
],
[
3038,
3039
],
[
3064,
3065
],
[
3098,
3099
],
[
3122,
3123
],
[
3244,
3245
],
[
3282,
3283
]
],
[
[
3003,
3014
],
[
3066,
3077
],
[
3100,
3111
],
[
3124,
3135
],
[
3182,
3193
],
[
3246,
3257
],
[
3284,
3295
]
]
] |
"""HDF5 related files.
This file contains a set of functions that related to read and write
HDF5 files.
Author: Yuhuang Hu
Email : [email protected]
"""
from __future__ import print_function, absolute_import
import h5py
from spiker import log
logger = log.get_logger("data-hdf5", log.DEBUG)
def init_hdf5(file_path, mode="w", cam_type="davis"):
"""Init HDF5 file object.
# Parameters
file_path : str
absolute path for the HDF5 file.
mode : str
w : for writing
r : for reading
cam_type : str
davis : for DAVIS camera
dvs : for DVS camera
# Returns
dataset : h5py.File
The file object of the given dataset
"""
if mode == "w":
dataset = h5py.File(file_path, mode=mode)
dataset.create_group("dvs")
dataset.create_group("extra")
if cam_type == "davis":
dataset.create_group("aps")
dataset.create_group("imu")
elif mode == "r":
dataset = h5py.File(file_path, mode=mode)
return dataset
| [
[
[
181,
195
]
],
[
[
197,
212
]
],
[
[
221,
225
],
[
740,
744
],
[
998,
1002
]
],
[
[
246,
249
],
[
260,
263
],
[
288,
291
]
],
[
[
251,
257
]
],
[
[
305,
314
]
]
] |
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: flatbuf
import flatbuffers
class FloatingPoint(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsFloatingPoint(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = FloatingPoint()
x.Init(buf, n + offset)
return x
# FloatingPoint
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# FloatingPoint
def Precision(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos)
return 0
def FloatingPointStart(builder): builder.StartObject(1)
def FloatingPointAddPrecision(builder, precision): builder.PrependInt16Slot(0, precision, 0)
def FloatingPointEnd(builder): return builder.EndObject()
| [
[
[
99,
110
],
[
246,
257
],
[
269,
280
],
[
458,
469
],
[
550,
561
],
[
670,
681
]
],
[
[
118,
131
],
[
322,
335
]
],
[
[
748,
766
]
],
[
[
804,
829
]
],
[
[
897,
913
]
]
] |
"""[Scynced Lights]
Class attributes are "shared"
Instance attributes are not shared.
"""
def sub(x, y):
f
class Light:
pass
a = Light()
b = Ligth()
| [
[
[
96,
99
]
],
[
[
121,
126
],
[
142,
147
]
],
[
[
138,
139
]
],
[
[
150,
151
]
]
] |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2012-11-14 17:09:50
from __future__ import unicode_literals, division, absolute_import
import time
import logging
from collections import deque
try:
from UserDict import DictMixin
except ImportError:
from collections import Mapping as DictMixin
import six
from six import iteritems
from six.moves import cPickle
class BaseCounter(object):
def __init__(self):
raise NotImplementedError
def event(self, value=1):
"""Fire a event."""
raise NotImplementedError
def value(self, value):
"""Set counter value."""
raise NotImplementedError
@property
def avg(self):
"""Get average value"""
raise NotImplementedError
@property
def sum(self):
"""Get sum of counter"""
raise NotImplementedError
def empty(self):
"""Clear counter"""
raise NotImplementedError
class TotalCounter(BaseCounter):
"""Total counter"""
def __init__(self):
self.cnt = 0
def event(self, value=1):
self.cnt += value
def value(self, value):
self.cnt = value
@property
def avg(self):
return self.cnt
@property
def sum(self):
return self.cnt
def empty(self):
return self.cnt == 0
class AverageWindowCounter(BaseCounter):
"""
Record last N(window) value
"""
def __init__(self, window_size=300):
self.window_size = window_size
self.values = deque(maxlen=window_size)
def event(self, value=1):
self.values.append(value)
value = event
@property
def avg(self):
return self.sum / len(self.values)
@property
def sum(self):
return sum(self.values)
def empty(self):
if not self.values:
return True
class TimebaseAverageWindowCounter(BaseCounter):
"""
Record last window_size * window_interval seconds values.
records will trim evert window_interval seconds
"""
def __init__(self, window_size=30, window_interval=10):
self.max_window_size = window_size
self.window_size = 0
self.window_interval = window_interval
self.values = deque(maxlen=window_size)
self.times = deque(maxlen=window_size)
self.cache_value = 0
self.cache_start = None
self._first_data_time = None
def event(self, value=1):
now = time.time()
if self._first_data_time is None:
self._first_data_time = now
if self.cache_start is None:
self.cache_value = value
self.cache_start = now
elif now - self.cache_start > self.window_interval:
self.values.append(self.cache_value)
self.times.append(self.cache_start)
self.on_append(self.cache_value, self.cache_start)
self.cache_value = value
self.cache_start = now
else:
self.cache_value += value
return self
def value(self, value):
self.cache_value = value
def _trim_window(self):
now = time.time()
if self.cache_start and now - self.cache_start > self.window_interval:
self.values.append(self.cache_value)
self.times.append(self.cache_start)
self.on_append(self.cache_value, self.cache_start)
self.cache_value = 0
self.cache_start = None
if self.window_size != self.max_window_size and self._first_data_time is not None:
time_passed = now - self._first_data_time
self.window_size = min(self.max_window_size, time_passed / self.window_interval)
window_limit = now - self.window_size * self.window_interval
while self.times and self.times[0] < window_limit:
self.times.popleft()
self.values.popleft()
@property
def avg(self):
sum = float(self.sum)
if not self.window_size:
return 0
return sum / self.window_size / self.window_interval
@property
def sum(self):
self._trim_window()
return sum(self.values) + self.cache_value
def empty(self):
self._trim_window()
if not self.values and not self.cache_start:
return True
def on_append(self, value, time):
pass
class CounterValue(DictMixin):
"""
A dict like value item for CounterManager.
"""
def __init__(self, manager, keys):
self.manager = manager
self._keys = keys
def __getitem__(self, key):
if key == '__value__':
key = self._keys
return self.manager.counters[key]
else:
key = self._keys + (key, )
available_keys = []
for _key in self.manager.counters:
if _key[:len(key)] == key:
available_keys.append(_key)
if len(available_keys) == 0:
raise KeyError
elif len(available_keys) == 1:
if available_keys[0] == key:
return self.manager.counters[key]
else:
return CounterValue(self.manager, key)
else:
return CounterValue(self.manager, key)
def __len__(self):
return len(self.keys())
def __iter__(self):
return iter(self.keys())
def __contains__(self, key):
return key in self.keys()
def keys(self):
result = set()
for key in self.manager.counters:
if key[:len(self._keys)] == self._keys:
key = key[len(self._keys):]
result.add(key[0] if key else '__value__')
return result
def to_dict(self, get_value=None):
"""Dump counters as a dict"""
result = {}
for key, value in iteritems(self):
if isinstance(value, BaseCounter):
if get_value is not None:
value = getattr(value, get_value)
result[key] = value
else:
result[key] = value.to_dict(get_value)
return result
class CounterManager(DictMixin):
"""
A dict like counter manager.
When using a tuple as event key, say: ('foo', 'bar'), You can visite counter
with manager['foo']['bar']. Or get all counters which first element is 'foo'
by manager['foo'].
It's useful for a group of counters.
"""
def __init__(self, cls=TimebaseAverageWindowCounter):
"""init manager with Counter cls"""
self.cls = cls
self.counters = {}
def event(self, key, value=1):
"""Fire a event of a counter by counter key"""
if isinstance(key, six.string_types):
key = (key, )
assert isinstance(key, tuple), "event key type error"
if key not in self.counters:
self.counters[key] = self.cls()
self.counters[key].event(value)
return self
def value(self, key, value=1):
"""Set value of a counter by counter key"""
if isinstance(key, six.string_types):
key = (key, )
assert isinstance(key, tuple), "event key type error"
if key not in self.counters:
self.counters[key] = self.cls()
self.counters[key].value(value)
return self
def trim(self):
"""Clear not used counters"""
for key, value in list(iteritems(self.counters)):
if value.empty():
del self.counters[key]
def __getitem__(self, key):
key = (key, )
available_keys = []
for _key in self.counters:
if _key[:len(key)] == key:
available_keys.append(_key)
if len(available_keys) == 0:
raise KeyError
elif len(available_keys) == 1:
if available_keys[0] == key:
return self.counters[key]
else:
return CounterValue(self, key)
else:
return CounterValue(self, key)
def __iter__(self):
return iter(self.keys())
def __len__(self):
return len(self.keys())
def keys(self):
result = set()
for key in self.counters:
result.add(key[0] if key else ())
return result
def to_dict(self, get_value=None):
"""Dump counters as a dict"""
self.trim()
result = {}
for key, value in iteritems(self):
if isinstance(value, BaseCounter):
if get_value is not None:
value = getattr(value, get_value)
result[key] = value
else:
result[key] = value.to_dict(get_value)
return result
def dump(self, filename):
"""Dump counters to file"""
try:
with open(filename, 'wb') as fp:
cPickle.dump(self.counters, fp)
except:
logging.error("can't dump counter to file: %s" % filename)
return False
return True
def load(self, filename):
"""Load counters to file"""
try:
with open(filename) as fp:
self.counters = cPickle.load(fp)
except:
logging.debug("can't load counter from file: %s" % filename)
return False
return True
| [
[
[
208,
224
]
],
[
[
226,
234
]
],
[
[
236,
251
]
],
[
[
260,
264
],
[
2554,
2558
],
[
3227,
3231
]
],
[
[
272,
279
],
[
8972,
8979
],
[
9272,
9279
]
],
[
[
304,
309
],
[
1626,
1631
],
[
2337,
2342
],
[
2384,
2389
]
],
[
[
340,
349
],
[
4473,
4482
],
[
6208,
6217
]
],
[
[
398,
418
],
[
4473,
4482
],
[
6208,
6217
]
],
[
[
427,
430
],
[
6769,
6772
],
[
7132,
7135
]
],
[
[
447,
456
],
[
5894,
5903
],
[
7470,
7479
],
[
8480,
8489
]
],
[
[
479,
486
],
[
8912,
8919
],
[
9227,
9234
]
],
[
[
495,
506
],
[
1070,
1081
],
[
1461,
1472
],
[
1990,
2001
],
[
5944,
5955
],
[
8530,
8541
]
],
[
[
1057,
1069
]
],
[
[
1440,
1460
]
],
[
[
1961,
1989
],
[
6526,
6554
]
],
[
[
4460,
4472
],
[
5228,
5240
],
[
5293,
5305
],
[
7995,
8007
],
[
8052,
8064
]
],
[
[
6193,
6207
]
]
] |
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Feature extractor class for ViT."""
from typing import List, Optional, Union
import numpy as np
from PIL import Image
from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...file_utils import TensorType
from ...image_utils import IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ImageFeatureExtractionMixin, is_torch_tensor
from ...utils import logging
logger = logging.get_logger(__name__)
class ViTFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
r"""
Constructs a ViT feature extractor.
This feature extractor inherits from :class:`~transformers.FeatureExtractionMixin` which contains most of the main
methods. Users should refer to this superclass for more information regarding those methods.
Args:
do_resize (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether to resize the input to a certain :obj:`size`.
size (:obj:`int` or :obj:`Tuple(int)`, `optional`, defaults to 224):
Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an
integer is provided, then the input will be resized to (size, size). Only has an effect if :obj:`do_resize`
is set to :obj:`True`.
resample (:obj:`int`, `optional`, defaults to :obj:`PIL.Image.BILINEAR`):
An optional resampling filter. This can be one of :obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BOX`,
:obj:`PIL.Image.BILINEAR`, :obj:`PIL.Image.HAMMING`, :obj:`PIL.Image.BICUBIC` or :obj:`PIL.Image.LANCZOS`.
Only has an effect if :obj:`do_resize` is set to :obj:`True`.
do_normalize (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether or not to normalize the input with mean and standard deviation.
image_mean (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
The sequence of means for each channel, to be used when normalizing images.
image_std (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
The sequence of standard deviations for each channel, to be used when normalizing images.
"""
model_input_names = ["pixel_values"]
def __init__(
self,
do_resize=True,
size=224,
resample=Image.BILINEAR,
do_normalize=True,
image_mean=None,
image_std=None,
**kwargs
):
super().__init__(**kwargs)
self.do_resize = do_resize
self.size = size
self.resample = resample
self.do_normalize = do_normalize
self.image_mean = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN
self.image_std = image_std if image_std is not None else IMAGENET_STANDARD_STD
def __call__(
self,
images: Union[
Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"] # noqa
],
return_tensors: Optional[Union[str, TensorType]] = None,
**kwargs
) -> BatchFeature:
"""
Main method to prepare for the model one or several image(s).
.. warning::
NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass
PIL images.
Args:
images (:obj:`PIL.Image.Image`, :obj:`np.ndarray`, :obj:`torch.Tensor`, :obj:`List[PIL.Image.Image]`, :obj:`List[np.ndarray]`, :obj:`List[torch.Tensor]`):
The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
number of channels, H and W are image height and width.
return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`, defaults to :obj:`'np'`):
If set, will return tensors of a particular framework. Acceptable values are:
* :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects.
* :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects.
* :obj:`'np'`: Return NumPy :obj:`np.ndarray` objects.
* :obj:`'jax'`: Return JAX :obj:`jnp.ndarray` objects.
Returns:
:class:`~transformers.BatchFeature`: A :class:`~transformers.BatchFeature` with the following fields:
- **pixel_values** -- Pixel values to be fed to a model, of shape (batch_size, num_channels, height,
width).
"""
# Input type checking for clearer error
valid_images = False
# Check that images has a valid type
if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
valid_images = True
elif isinstance(images, (list, tuple)):
if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
valid_images = True
if not valid_images:
raise ValueError(
"Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example),"
"`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
)
is_batched = bool(
isinstance(images, (list, tuple))
and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
)
if not is_batched:
images = [images]
# transformations (resizing + normalization)
if self.do_resize and self.size is not None:
images = [self.resize(image=image, size=self.size, resample=self.resample) for image in images]
if self.do_normalize:
images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images]
# return as BatchFeature
data = {"pixel_values": images}
encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)
return encoded_inputs
| [
[
[
685,
689
],
[
3487,
3491
],
[
3506,
3510
],
[
3524,
3528
]
],
[
[
691,
699
],
[
3588,
3596
]
],
[
[
701,
706
],
[
3427,
3432
],
[
3597,
3602
]
],
[
[
715,
726
],
[
3459,
3461
],
[
3511,
3513
],
[
5368,
5370
],
[
5560,
5562
],
[
6054,
6056
]
],
[
[
743,
748
],
[
2915,
2920
],
[
3446,
3451
],
[
3492,
3497
],
[
5355,
5360
],
[
5547,
5552
],
[
6041,
6046
]
],
[
[
790,
802
],
[
3655,
3667
],
[
6623,
6635
]
],
[
[
804,
826
],
[
1080,
1102
]
],
[
[
853,
863
],
[
3608,
3618
]
],
[
[
891,
913
],
[
3268,
3290
]
],
[
[
915,
936
],
[
3356,
3377
]
],
[
[
938,
965
],
[
1104,
1131
]
],
[
[
967,
982
],
[
5384,
5399
],
[
5576,
5591
],
[
6070,
6085
]
],
[
[
1004,
1011
],
[
1023,
1030
]
],
[
[
1014,
1020
]
],
[
[
1060,
1079
]
]
] |
#!/usr/bin/env python3
UNKNOWN = -1
def read_val():
return int(input())
def read_row():
return list(map(int, input().split()))
def read_grid():
return [read_row() for _ in range(read_val())]
def make_blank_row(i):
return [UNKNOWN] * i
def make_blank_grid(n):
return [make_blank_row(i) for i in range(1, n + 1)]
def compute_max_path_sum(grid):
memo = make_blank_grid(len(grid))
def dfs(i, j):
if i == len(grid):
return 0
if memo[i][j] == UNKNOWN:
memo[i][j] = grid[i][j] + max(dfs(i + 1, j), dfs(i + 1, j + 1))
return memo[i][j]
return dfs(0, 0)
for t in range(read_val()):
print(compute_max_path_sum(read_grid()))
| [
[
[
24,
31
],
[
243,
250
],
[
514,
521
]
],
[
[
42,
50
],
[
676,
684
],
[
194,
202
]
],
[
[
83,
91
],
[
168,
176
]
],
[
[
143,
152
],
[
720,
729
]
],
[
[
212,
226
],
[
293,
307
]
],
[
[
261,
276
],
[
381,
396
]
],
[
[
342,
362
],
[
699,
719
]
],
[
[
665,
666
]
]
] |
import platform
# print(platform.system())
operating_system = platform.system().lower()
if operating_system == 'darwin':
from .blender_utils_macos import get_installed_blender_versions
operating_system_name = 'macos'
elif operating_system == 'linux':
from .blender_utils_linux import get_installed_blender_versions
operating_system_name = 'linux'
elif operating_system == 'windows':
from .blender_utils_windows import get_installed_blender_versions
operating_system_name = 'windows'
else:
raise Exception("Unimplemented for OS {}".format(operating_system))
from .blender_utils_web import get_blender_version_download_links
def find_blender(version):
# TODO: add fuzzy version matching, ie. '>=2.80', '~2.80', '<2.80', etc.
installed_versions = get_installed_blender_versions()
if version in installed_versions:
return installed_versions[version]
else:
print("blender version '{}' not found; found {} version(s):".format(version, len(installed_versions)))
for v, path in installed_versions.items():
print(" {}: {}".format(v, path))
print("searching web archive...")
versions = get_blender_version_download_links(version, operating_system_name)
print("found {} download(s) for blender version '{}', platform '{}':".format(len(versions), version, operating_system_name))
for url in versions:
print(" {}".format(url))
if __name__ == '__main__':
for version, exec_path in get_installed_blender_versions().items():
print("found blender {version}: {path}".format(version=version,
path=exec_path))
blender = find_blender('2.80')
if blender:
print("Found blender: '{}'".format(blender))
else:
print("No matching blender version installed :(")
| [
[
[
7,
15
],
[
64,
72
]
],
[
[
45,
61
],
[
93,
109
],
[
232,
248
],
[
370,
386
],
[
568,
584
]
],
[
[
160,
190
],
[
1508,
1538
],
[
785,
815
]
],
[
[
195,
216
],
[
1224,
1245
],
[
1356,
1377
]
],
[
[
298,
328
],
[
1508,
1538
],
[
785,
815
]
],
[
[
333,
354
],
[
1224,
1245
],
[
1356,
1377
]
],
[
[
440,
470
],
[
1508,
1538
],
[
785,
815
]
],
[
[
475,
496
],
[
1224,
1245
],
[
1356,
1377
]
],
[
[
619,
653
],
[
1180,
1214
]
],
[
[
660,
672
],
[
1708,
1720
]
],
[
[
1486,
1493
],
[
1613,
1620
]
],
[
[
1495,
1504
],
[
1682,
1691
]
],
[
[
1698,
1705
],
[
1736,
1743
],
[
1788,
1795
]
]
] |
import functools
import random
from math import cos, pi
import cv2
import kornia
import numpy as np
import torch
from kornia.augmentation import ColorJitter
from data.util import read_img
from PIL import Image
from io import BytesIO
# Get a rough visualization of the above distribution. (Y-axis is meaningless, just spreads data)
from utils.util import opt_get
'''
if __name__ == '__main__':
import numpy as np
import matplotlib.pyplot as plt
data = np.asarray([get_rand() for _ in range(5000)])
plt.plot(data, np.random.uniform(size=(5000,)), 'x')
plt.show()
'''
def kornia_color_jitter_numpy(img, setting):
if setting * 255 > 1:
# I'm using Kornia's ColorJitter, which requires pytorch arrays in b,c,h,w format.
img = torch.from_numpy(img).permute(2,0,1).unsqueeze(0)
img = ColorJitter(setting, setting, setting, setting)(img)
img = img.squeeze(0).permute(1,2,0).numpy()
return img
# Performs image corruption on a list of images from a configurable set of corruption
# options.
class ImageCorruptor:
def __init__(self, opt):
self.opt = opt
self.reset_random()
self.blur_scale = opt['corruption_blur_scale'] if 'corruption_blur_scale' in opt.keys() else 1
self.fixed_corruptions = opt['fixed_corruptions'] if 'fixed_corruptions' in opt.keys() else []
self.num_corrupts = opt['num_corrupts_per_image'] if 'num_corrupts_per_image' in opt.keys() else 0
self.cosine_bias = opt_get(opt, ['cosine_bias'], True)
if self.num_corrupts == 0:
return
else:
self.random_corruptions = opt['random_corruptions'] if 'random_corruptions' in opt.keys() else []
def reset_random(self):
if 'random_seed' in self.opt.keys():
self.rand = random.Random(self.opt['random_seed'])
else:
self.rand = random.Random()
# Feeds a random uniform through a cosine distribution to slightly bias corruptions towards "uncorrupted".
# Return is on [0,1] with a bias towards 0.
def get_rand(self):
r = self.rand.random()
if self.cosine_bias:
return 1 - cos(r * pi / 2)
else:
return r
def corrupt_images(self, imgs, return_entropy=False):
if self.num_corrupts == 0 and not self.fixed_corruptions:
if return_entropy:
return imgs, []
else:
return imgs
if self.num_corrupts == 0:
augmentations = []
else:
augmentations = random.choices(self.random_corruptions, k=self.num_corrupts)
# Sources of entropy
corrupted_imgs = []
entropy = []
undo_fns = []
applied_augs = augmentations + self.fixed_corruptions
for img in imgs:
for aug in augmentations:
r = self.get_rand()
img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
if undo_fn is not None:
undo_fns.append(undo_fn)
for aug in self.fixed_corruptions:
r = self.get_rand()
img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
entropy.append(r)
if undo_fn is not None:
undo_fns.append(undo_fn)
# Apply undo_fns after all corruptions are finished, in same order.
for ufn in undo_fns:
img = ufn(img)
corrupted_imgs.append(img)
if return_entropy:
return corrupted_imgs, entropy
else:
return corrupted_imgs
def apply_corruption(self, img, aug, rand_val, applied_augmentations):
undo_fn = None
if 'color_quantization' in aug:
# Color quantization
quant_div = 2 ** (int(rand_val * 10 / 3) + 2)
img = img * 255
img = (img // quant_div) * quant_div
img = img / 255
elif 'color_jitter' in aug:
lo_end = 0
hi_end = .2
setting = rand_val * (hi_end - lo_end) + lo_end
img = kornia_color_jitter_numpy(img, setting)
elif 'gaussian_blur' in aug:
img = cv2.GaussianBlur(img, (0,0), self.blur_scale*rand_val*1.5)
elif 'motion_blur' in aug:
# Motion blur
intensity = self.blur_scale*rand_val * 3 + 1
angle = random.randint(0,360)
k = np.zeros((intensity, intensity), dtype=np.float32)
k[(intensity - 1) // 2, :] = np.ones(intensity, dtype=np.float32)
k = cv2.warpAffine(k, cv2.getRotationMatrix2D((intensity / 2 - 0.5, intensity / 2 - 0.5), angle, 1.0),
(intensity, intensity))
k = k * (1.0 / np.sum(k))
img = cv2.filter2D(img, -1, k)
elif 'block_noise' in aug:
# Large distortion blocks in part of an img, such as is used to mask out a face.
pass
elif 'lq_resampling' in aug:
# Random mode interpolation HR->LR->HR
if 'lq_resampling4x' == aug:
scale = 4
else:
if rand_val < .3:
scale = 1
elif rand_val < .7:
scale = 2
else:
scale = 4
if scale > 1:
interpolation_modes = [cv2.INTER_NEAREST, cv2.INTER_CUBIC, cv2.INTER_LINEAR, cv2.INTER_LANCZOS4]
mode = random.randint(0,4) % len(interpolation_modes)
# Downsample first, then upsample using the random mode.
img = cv2.resize(img, dsize=(img.shape[1]//scale, img.shape[0]//scale), interpolation=mode)
def lq_resampling_undo_fn(scale, img):
return cv2.resize(img, dsize=(img.shape[1]*scale, img.shape[0]*scale), interpolation=cv2.INTER_LINEAR)
undo_fn = functools.partial(lq_resampling_undo_fn, scale)
elif 'color_shift' in aug:
# Color shift
pass
elif 'interlacing' in aug:
# Interlacing distortion
pass
elif 'chromatic_aberration' in aug:
# Chromatic aberration
pass
elif 'noise' in aug:
# Random noise
if 'noise-5' == aug:
noise_intensity = 5 / 255.0
else:
noise_intensity = (rand_val*6) / 255.0
img += np.random.rand(*img.shape) * noise_intensity
elif 'jpeg' in aug:
if 'noise' not in applied_augmentations and 'noise-5' not in applied_augmentations:
if aug == 'jpeg':
lo=10
range=20
elif aug == 'jpeg-low':
lo=15
range=10
elif aug == 'jpeg-medium':
lo=23
range=25
elif aug == 'jpeg-broad':
lo=15
range=60
elif aug == 'jpeg-normal':
lo=47
range=35
else:
raise NotImplementedError("specified jpeg corruption doesn't exist")
# JPEG compression
qf = (int((1-rand_val)*range) + lo)
# Use PIL to perform a mock compression to a data buffer, then swap back to cv2.
img = (img * 255).astype(np.uint8)
img = Image.fromarray(img)
buffer = BytesIO()
img.save(buffer, "JPEG", quality=qf, optimize=True)
buffer.seek(0)
jpeg_img_bytes = np.asarray(bytearray(buffer.read()), dtype="uint8")
img = read_img("buffer", jpeg_img_bytes, rgb=True)
elif 'saturation' in aug:
# Lightening / saturation
saturation = rand_val * .3
img = np.clip(img + saturation, a_max=1, a_min=0)
elif 'greyscale' in aug:
img = np.tile(np.mean(img, axis=2, keepdims=True), [1,1,3])
elif 'none' not in aug:
raise NotImplementedError("Augmentation doesn't exist")
return img, undo_fn
| [
[
[
7,
16
],
[
5932,
5941
]
],
[
[
24,
30
],
[
1805,
1811
],
[
1882,
1888
],
[
2559,
2565
],
[
4420,
4426
],
[
5500,
5506
]
],
[
[
48,
51
],
[
2165,
2168
]
],
[
[
53,
55
],
[
2173,
2175
]
],
[
[
64,
67
],
[
4223,
4226
],
[
4603,
4606
],
[
4621,
4624
],
[
4813,
4816
],
[
5403,
5406
],
[
5422,
5425
],
[
5439,
5442
],
[
5457,
5460
],
[
5642,
5645
],
[
5810,
5813
],
[
5888,
5891
]
],
[
[
75,
81
]
],
[
[
89,
100
],
[
4458,
4460
],
[
4497,
4499
],
[
4550,
4552
],
[
4575,
4577
],
[
4784,
4786
],
[
6468,
6470
],
[
7450,
7452
],
[
7670,
7672
],
[
7918,
7920
],
[
8013,
8015
],
[
8021,
8023
]
],
[
[
108,
113
],
[
768,
773
]
],
[
[
146,
157
],
[
832,
843
]
],
[
[
181,
189
],
[
7744,
7752
]
],
[
[
206,
211
],
[
7482,
7487
]
],
[
[
227,
234
],
[
7528,
7535
]
],
[
[
358,
365
],
[
1493,
1500
]
],
[
[
596,
621
],
[
4128,
4153
]
],
[
[
1057,
1071
]
]
] |
# This test requires CPython3.5
print(b"%%" % ())
print(b"=%d=" % 1)
print(b"=%d=%d=" % (1, 2))
print(b"=%s=" % b"str")
print(b"=%r=" % b"str")
print("PASS") | [] |
#
# test_JpegCompression.py
#
import pytest
import albumentations as A
from .context import TfDataAugmentation as Tfda
from . import test_utils
from .test_utils import TestResult
@pytest.mark.parametrize(
"quality_lower, quality_upper, expected, message", [
# quality_lower
(-1, 100, TestResult.Error,
"quality_lower < min => Error"),
(0, 100, TestResult.OK,
"quality_lower == min => OK"),
(100, 100, TestResult.OK,
"quality_lower == max => OK"),
(101, 100, TestResult.Error,
"quality_lower >= max => Error"),
# quality_upper
(0, -1, TestResult.Error,
"quality_upper < min => Error"),
(0, 0, TestResult.OK,
"quality_upper == min => OK"),
(0, 100, TestResult.OK,
"quality_upper == max => OK"),
(0, 101, TestResult.Error,
"quality_upper > max => Error"),
# Relation
(50, 50, TestResult.OK,
"quality_lower == quality_upper => OK"),
(51, 50, TestResult.Error,
"quality_lower > quality_upper => Error"),
])
def test_hue_shift_limit_value(
quality_lower, quality_upper, expected, message):
try:
Tfda.JpegCompression(
quality_lower=quality_lower,
quality_upper=quality_upper)
actual = TestResult.OK
except ValueError:
actual = TestResult.Error
assert expected == actual, message
def test_call():
quality_lower = 50
quality_upper = 100
tgt_jpeg = Tfda.JpegCompression(
quality_lower=quality_lower,
quality_upper=quality_upper,
p=1.0)
tgt_transform = \
test_utils.make_tgt_transform(tgt_jpeg)
image = test_utils.make_test_image()
tgt_result = tgt_transform(image=image)
actual_image = tgt_result['image']
image_np = image.numpy()
quality = float(tgt_jpeg.get_param('quality'))
expected_image = A.image_compression(
image_np, quality, image_type='.jpg')
test_utils.partial_assert_array(
expected_image, actual_image, 0.6, "image", eps=0.1)
| [
[
[
38,
44
],
[
183,
189
]
],
[
[
52,
71
],
[
1936,
1937
]
],
[
[
93,
119
],
[
1216,
1220
],
[
1528,
1532
]
],
[
[
134,
144
],
[
1669,
1679
],
[
1721,
1731
],
[
2008,
2018
]
],
[
[
169,
179
],
[
307,
317
],
[
384,
394
],
[
458,
468
],
[
532,
542
],
[
634,
644
],
[
709,
719
],
[
781,
791
],
[
853,
863
],
[
950,
960
],
[
1032,
1042
],
[
1337,
1347
],
[
1391,
1401
]
],
[
[
1113,
1139
]
],
[
[
1453,
1462
]
]
] |
import os
from torch.utils.data import DataLoader
from continuum.datasets import CIFAR10, InMemoryDataset
from continuum.datasets import MNIST
import torchvision
from continuum.scenarios import TransformationIncremental
import pytest
import numpy as np
from continuum.transforms.bg_swap import BackgroundSwap
DATA_PATH = os.environ.get("CONTINUUM_DATA_PATH")
# Uncomment for debugging via image output
# import matplotlib.pyplot as plt
def test_bg_swap_fast():
"""
Fast test for background swap.
"""
bg_x = np.ones(shape=[2, 5, 5, 3]) * -1
bg_y = np.random.rand(2)
fg = np.random.normal(loc=.5, scale=.1, size=[5, 5])
bg = InMemoryDataset(bg_x, bg_y)
bg_swap = BackgroundSwap(bg, input_dim=(5, 5), normalize_bg=None)
spliced_1_channel = bg_swap(fg)[:, :, 0]
assert np.array_equal((spliced_1_channel <= -1), (fg <= .5))
@pytest.mark.slow
def test_background_swap_numpy():
"""
Test background swap on a single ndarray input.
"""
mnist = MNIST(DATA_PATH, download=True, train=True)
cifar = CIFAR10(DATA_PATH, download=True, train=True)
bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))
im = mnist.get_data()[0][0]
im = bg_swap(im)
# Uncomment for debugging
# plt.imshow(im, interpolation='nearest')
# plt.show()
@pytest.mark.slow
def test_background_swap_torch():
"""
Test background swap on a single tensor input.
"""
cifar = CIFAR10(DATA_PATH, download=True, train=True)
mnist = torchvision.datasets.MNIST(DATA_PATH, train=True, download=True,
transform=torchvision.transforms.Compose([
torchvision.transforms.ToTensor()
]))
bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))
im = mnist[0][0]
im = bg_swap(im)
# Uncomment for debugging
# plt.imshow(im.permute(1, 2, 0), interpolation='nearest')
# plt.show()
@pytest.mark.slow
def test_background_tranformation():
"""
Example code using TransformationIncremental to create a setting with 3 tasks.
"""
cifar = CIFAR10(DATA_PATH, train=True)
mnist = MNIST(DATA_PATH, download=False, train=True)
nb_task = 3
list_trsf = []
for i in range(nb_task):
list_trsf.append([torchvision.transforms.ToTensor(), BackgroundSwap(cifar, bg_label=i, input_dim=(28, 28)),
torchvision.transforms.ToPILImage()])
scenario = TransformationIncremental(mnist, base_transformations=[torchvision.transforms.ToTensor()],
incremental_transformations=list_trsf)
folder = "tests/samples/background_trsf/"
if not os.path.exists(folder):
os.makedirs(folder)
for task_id, task_data in enumerate(scenario):
task_data.plot(path=folder, title=f"background_{task_id}.jpg", nb_samples=100, shape=[28, 28, 3])
loader = DataLoader(task_data)
_, _, _ = next(iter(loader))
| [
[
[
7,
9
],
[
324,
326
],
[
2723,
2725
],
[
2755,
2757
]
],
[
[
40,
50
],
[
2949,
2959
]
],
[
[
82,
89
],
[
1059,
1066
],
[
1443,
1450
],
[
2148,
2155
]
],
[
[
91,
106
],
[
658,
673
]
],
[
[
138,
143
],
[
1003,
1008
],
[
2191,
2196
]
],
[
[
151,
162
],
[
1502,
1513
],
[
1616,
1627
],
[
1692,
1703
],
[
2326,
2337
],
[
2442,
2453
],
[
2550,
2561
]
],
[
[
195,
220
],
[
2495,
2520
]
],
[
[
228,
234
],
[
872,
878
],
[
1313,
1319
],
[
1983,
1989
]
],
[
[
242,
253
],
[
529,
531
],
[
573,
575
],
[
601,
603
],
[
815,
817
]
],
[
[
296,
310
],
[
701,
715
],
[
1120,
1134
],
[
1784,
1798
],
[
2361,
2375
]
],
[
[
312,
321
],
[
1009,
1018
],
[
1067,
1076
],
[
1451,
1460
],
[
1529,
1538
],
[
2156,
2165
],
[
2197,
2206
]
],
[
[
446,
463
]
],
[
[
893,
919
]
],
[
[
1334,
1360
]
],
[
[
2004,
2033
]
]
] |
# =========================================================================================
# Copyright 2015 Community Information Online Consortium (CIOC) and KCL Software Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================================
# std lib
import os
# jQuery and jQueryUI versions
JQUERY_VERSION = "1.6.2"
JQUERY_UI_VERSION = "1.8.16"
# formatting constants
DATE_TEXT_SIZE = 25
TEXT_SIZE = 85
TEXTAREA_COLS = 85
TEXTAREA_ROWS_SHORT = 2
TEXTAREA_ROWS_LONG = 4
TEXTAREA_ROWS_XLONG = 10
MAX_LENGTH_CHECKLIST_NOTES = 255
EMAIL_LENGTH = 60
# application running constants
_app_path = None
_config_file = None
_app_name = None
session_lock_dir = None
publish_dir = None
def update_cache_values():
# called from application init at startup
global _app_path, _config_file, _app_name, session_lock_dir, publish_dir
if _app_path is None:
_app_path = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
_app_name = os.path.split(_app_path)[1]
_config_file = os.path.join(_app_path, '..', '..', 'config', _app_name + '.ini')
session_lock_dir = os.path.join(_app_path, 'python', 'session_lock')
publish_dir = os.path.join(_app_path, 'python', 'published_files')
try:
os.makedirs(session_lock_dir)
except os.error:
pass
try:
os.makedirs(publish_dir)
except os.error:
pass
| [
[
[
850,
852
],
[
1469,
1471
],
[
1486,
1488
],
[
1499,
1501
],
[
1565,
1567
],
[
1616,
1618
],
[
1709,
1711
],
[
1781,
1783
],
[
1860,
1862
],
[
1905,
1907
],
[
1958,
1960
],
[
1998,
2000
]
],
[
[
885,
899
]
],
[
[
910,
927
]
],
[
[
963,
977
]
],
[
[
983,
992
]
],
[
[
998,
1011
]
],
[
[
1017,
1036
]
],
[
[
1041,
1059
]
],
[
[
1064,
1083
]
],
[
[
1089,
1115
]
],
[
[
1122,
1134
]
],
[
[
1173,
1182
],
[
1430,
1439
]
],
[
[
1190,
1202
]
],
[
[
1210,
1219
]
],
[
[
1227,
1243
]
],
[
[
1251,
1262
]
],
[
[
1276,
1295
]
],
[
[
1457,
1466
],
[
1579,
1588
],
[
1629,
1638
],
[
1722,
1731
],
[
1794,
1803
]
],
[
[
1553,
1562
],
[
1662,
1671
]
],
[
[
1601,
1613
]
],
[
[
1690,
1706
],
[
1872,
1888
]
],
[
[
1767,
1778
],
[
1970,
1981
]
]
] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class WebSiteManagementClientConfiguration(Configuration):
"""Configuration for WebSiteManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Your Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2015-08-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
[
[
487,
490
],
[
1484,
1487
],
[
2190,
2193
]
],
[
[
492,
505
],
[
693,
706
]
],
[
[
544,
557
],
[
873,
886
]
],
[
[
590,
598
],
[
2277,
2285
],
[
2374,
2382
],
[
2465,
2473
],
[
2558,
2566
],
[
2764,
2772
],
[
2870,
2878
],
[
2970,
2978
],
[
3186,
3194
]
],
[
[
636,
656
],
[
2675,
2695
]
],
[
[
681,
688
],
[
2093,
2100
]
],
[
[
807,
827
]
],
[
[
836,
872
],
[
1735,
1771
]
]
] |
import django.http
import unittest.mock
from .. import middleware
def get_response(req):
# dummy get_response, just return an empty response
return django.http.HttpResponse()
def test_leaves_remote_addr_alone_if_no_real_ip():
remote_addr = object()
request = unittest.mock.MagicMock()
request.META = {"REMOTE_ADDR": remote_addr}
middleware.XRealIPMiddleware(get_response)(request)
assert request.META["REMOTE_ADDR"] is remote_addr
def test_switches_out_x_real_ip_if_available():
remote_addr = object()
x_real_ip = object()
request = unittest.mock.MagicMock()
request.META = {"REMOTE_ADDR": remote_addr, "HTTP_X_REAL_IP": x_real_ip}
middleware.XRealIPMiddleware(get_response)(request)
assert request.META["REMOTE_ADDR"] is x_real_ip
assert request.META["HTTP_X_REAL_IP"] is x_real_ip
| [
[
[
7,
18
],
[
160,
166
]
],
[
[
27,
40
],
[
281,
289
],
[
584,
592
]
],
[
[
57,
67
],
[
360,
370
],
[
692,
702
]
],
[
[
74,
86
],
[
389,
401
],
[
721,
733
]
],
[
[
193,
236
]
],
[
[
473,
513
]
]
] |
#!/usr/bin/env python
import time
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT)
GPIO.output(21, GPIO.LOW)
time.sleep(3.00)
GPIO.output(21, GPIO.HIGH)
GPIO.cleanup()
| [
[
[
30,
34
],
[
137,
141
]
],
[
[
43,
59
],
[
62,
66
],
[
75,
79
],
[
85,
89
],
[
100,
104
],
[
110,
114
],
[
126,
130
],
[
155,
159
],
[
171,
175
],
[
182,
186
]
]
] |
from direct.directnotify.DirectNotifyGlobal import directNotify
class Notifier:
def __init__(self, name):
"""
@param name: The name of the notifier. Be sure to add it to your config/Config.prc!
@type name: str
"""
self.notify = directNotify.newCategory(name)
| [
[
[
51,
63
],
[
274,
286
]
],
[
[
72,
80
]
]
] |
import numpy as np
def train_ml_squarer() -> None:
print("Training!")
def square() -> int:
"""Square a number...maybe"""
return np.random.randint(1, 100)
if __name__ == '__main__':
train_ml_squarer() | [
[
[
7,
18
],
[
144,
146
]
],
[
[
25,
41
],
[
203,
219
]
],
[
[
82,
88
]
]
] |
"""
Platformer Game
"""
import arcade
# Constants
SCREEN_WIDTH = 1000
SCREEN_HEIGHT = 650
SCREEN_TITLE = "Platformer"
# Constants used to scale our sprites from their original size
CHARACTER_SCALING = 1
TILE_SCALING = 0.5
COIN_SCALING = 0.5
SPRITE_PIXEL_SIZE = 128
GRID_PIXEL_SIZE = SPRITE_PIXEL_SIZE * TILE_SCALING
# Movement speed of player, in pixels per frame
PLAYER_MOVEMENT_SPEED = 10
GRAVITY = 1
PLAYER_JUMP_SPEED = 20
class MyGame(arcade.Window):
"""
Main application class.
"""
def __init__(self):
# Call the parent class and set up the window
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
# Our TileMap Object
self.tile_map = None
# Our Scene Object
self.scene = None
# Separate variable that holds the player sprite
self.player_sprite = None
# Our physics engine
self.physics_engine = None
# A Camera that can be used for scrolling the screen
self.camera = None
# A Camera that can be used to draw GUI elements
self.gui_camera = None
# Keep track of the score
self.score = 0
# Load sounds
self.collect_coin_sound = arcade.load_sound(":resources:sounds/coin1.wav")
self.jump_sound = arcade.load_sound(":resources:sounds/jump1.wav")
arcade.set_background_color(arcade.csscolor.CORNFLOWER_BLUE)
def setup(self):
"""Set up the game here. Call this function to restart the game."""
# Setup the Cameras
self.camera = arcade.Camera(self.width, self.height)
self.gui_camera = arcade.Camera(self.width, self.height)
# Name of map file to load
map_name = ":resources:tiled_maps/map.json"
# Layer specific options are defined based on Layer names in a dictionary
# Doing this will make the SpriteList for the platforms layer
# use spatial hashing for detection.
layer_options = {
"Platforms": {
"use_spatial_hash": True,
},
}
# Read in the tiled map
self.tile_map = arcade.load_tilemap(map_name, TILE_SCALING, layer_options)
# Initialize Scene with our TileMap, this will automatically add all layers
# from the map as SpriteLists in the scene in the proper order.
self.scene = arcade.Scene.from_tilemap(self.tile_map)
# Keep track of the score
self.score = 0
# Set up the player, specifically placing it at these coordinates.
image_source = ":resources:images/animated_characters/female_adventurer/femaleAdventurer_idle.png"
self.player_sprite = arcade.Sprite(image_source, CHARACTER_SCALING)
self.player_sprite.center_x = 128
self.player_sprite.center_y = 128
self.scene.add_sprite("Player", self.player_sprite)
# --- Other stuff
# Set the background color
if self.tile_map.background_color:
arcade.set_background_color(self.tile_map.background_color)
# Create the 'physics engine'
self.physics_engine = arcade.PhysicsEnginePlatformer(
self.player_sprite, gravity_constant=GRAVITY, walls=self.scene["Platforms"]
)
def on_draw(self):
"""Render the screen."""
# Clear the screen to the background color
arcade.start_render()
# Activate the game camera
self.camera.use()
# Draw our Scene
self.scene.draw()
# Activate the GUI camera before drawing GUI elements
self.gui_camera.use()
# Draw our score on the screen, scrolling it with the viewport
score_text = f"Score: {self.score}"
arcade.draw_text(
score_text,
10,
10,
arcade.csscolor.WHITE,
18,
)
def on_key_press(self, key, modifiers):
"""Called whenever a key is pressed."""
if key == arcade.key.UP or key == arcade.key.W:
if self.physics_engine.can_jump():
self.player_sprite.change_y = PLAYER_JUMP_SPEED
arcade.play_sound(self.jump_sound)
elif key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED
def on_key_release(self, key, modifiers):
"""Called when the user releases a key."""
if key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = 0
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = 0
def center_camera_to_player(self):
screen_center_x = self.player_sprite.center_x - (self.camera.viewport_width / 2)
screen_center_y = self.player_sprite.center_y - (
self.camera.viewport_height / 2
)
if screen_center_x < 0:
screen_center_x = 0
if screen_center_y < 0:
screen_center_y = 0
player_centered = screen_center_x, screen_center_y
self.camera.move_to(player_centered)
def on_update(self, delta_time):
"""Movement and game logic"""
# Move the player with the physics engine
self.physics_engine.update()
# See if we hit any coins
coin_hit_list = arcade.check_for_collision_with_list(
self.player_sprite, self.scene["Coins"]
)
# Loop through each coin we hit (if any) and remove it
for coin in coin_hit_list:
# Remove the coin
coin.remove_from_sprite_lists()
# Play a sound
arcade.play_sound(self.collect_coin_sound)
# Add one to the score
self.score += 1
# Position the camera
self.center_camera_to_player()
def main():
"""Main function"""
window = MyGame()
window.setup()
arcade.run()
if __name__ == "__main__":
main()
| [
[
[
31,
37
],
[
444,
450
],
[
1215,
1221
],
[
1290,
1296
],
[
1348,
1354
],
[
1376,
1382
],
[
1558,
1564
],
[
1623,
1629
],
[
2125,
2131
],
[
2362,
2368
],
[
2673,
2679
],
[
2981,
2987
],
[
3110,
3116
],
[
3357,
3363
],
[
3710,
3716
],
[
3796,
3802
],
[
3957,
3963
],
[
3981,
3987
],
[
4122,
4128
],
[
4177,
4183
],
[
4203,
4209
],
[
4302,
4308
],
[
4329,
4335
],
[
4524,
4530
],
[
4550,
4556
],
[
4628,
4634
],
[
4655,
4661
],
[
5410,
5416
],
[
5722,
5728
],
[
5981,
5987
]
],
[
[
51,
63
],
[
609,
621
]
],
[
[
71,
84
],
[
623,
636
]
],
[
[
91,
103
],
[
638,
650
]
],
[
[
183,
200
],
[
2701,
2718
]
],
[
[
205,
217
],
[
305,
317
],
[
2155,
2167
]
],
[
[
224,
236
]
],
[
[
243,
260
],
[
285,
302
]
],
[
[
267,
282
]
],
[
[
367,
388
],
[
4260,
4281
],
[
4385,
4406
]
],
[
[
394,
401
],
[
3191,
3198
]
],
[
[
406,
423
],
[
4088,
4105
]
],
[
[
437,
443
],
[
5949,
5955
]
],
[
[
5904,
5908
],
[
6027,
6031
]
]
] |
#!/usr/bin/env python3
'''
lib/ycmd/start.py
Server bootstrap logic. Includes a utility class for normalizing parameters and
calculating default ones. Also includes a helper to set up the temporary
options file.
'''
import logging
import os
import tempfile
from ..process import (
FileHandles,
Process,
)
from ..util.fs import (
default_python_binary_path,
save_json_file,
)
from ..ycmd.constants import (
YCMD_LOG_SPOOL_OUTPUT,
YCMD_LOG_SPOOL_SIZE,
YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS,
YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS,
)
from ..ycmd.settings import (
get_default_settings_path,
generate_settings_data,
)
logger = logging.getLogger('sublime-ycmd.' + __name__)
class StartupParameters(object):
'''
Startup parameters for a ycmd server instance.
Should include all the necessary configuration for creating the ycmd
server process. Also calculates defaults for certain parameters.
'''
def __init__(self, ycmd_root_directory=None,
ycmd_settings_path=None,
working_directory=None,
python_binary_path=None,
server_idle_suicide_seconds=None,
server_check_interval_seconds=None):
self._ycmd_root_directory = None
self._ycmd_settings_path = None
self._working_directory = None
self._python_binary_path = None
self._server_idle_suicide_seconds = None
self._server_check_interval_seconds = None
# additional attributes, can be set via the properties
self._log_level = None
self._stdout_log_path = None
self._stderr_log_path = None
self._keep_logs = None
self.ycmd_root_directory = ycmd_root_directory
self.ycmd_settings_path = ycmd_settings_path
self.working_directory = working_directory
self.python_binary_path = python_binary_path
self.server_idle_suicide_seconds = server_idle_suicide_seconds
self.server_check_interval_seconds = server_check_interval_seconds
@property
def ycmd_root_directory(self):
if self._ycmd_root_directory is None:
logger.warning('no ycmd root directory has been set')
return self._ycmd_root_directory
@ycmd_root_directory.setter
def ycmd_root_directory(self, ycmd_root_directory):
if ycmd_root_directory is not None and \
not isinstance(ycmd_root_directory, str):
raise TypeError(ycmd_root_directory,)
self._ycmd_root_directory = ycmd_root_directory
@property
def ycmd_settings_path(self):
if self._ycmd_settings_path is None:
if self._ycmd_root_directory is not None:
return get_default_settings_path(self._ycmd_root_directory)
logger.warning('no ycmd root directory has been set')
return self._ycmd_settings_path
@ycmd_settings_path.setter
def ycmd_settings_path(self, ycmd_settings_path):
if ycmd_settings_path is not None and \
not isinstance(ycmd_settings_path, str):
raise TypeError(ycmd_settings_path,)
self._ycmd_settings_path = ycmd_settings_path
@property
def working_directory(self):
if self._working_directory is None:
return os.getcwd()
return self._working_directory
@working_directory.setter
def working_directory(self, working_directory):
if working_directory is not None and \
not isinstance(working_directory, str):
raise TypeError(working_directory,)
self._working_directory = working_directory
@property
def python_binary_path(self):
if self._python_binary_path is None:
return default_python_binary_path()
return self._python_binary_path
@python_binary_path.setter
def python_binary_path(self, python_binary_path):
if python_binary_path is not None and \
not isinstance(python_binary_path, str):
raise TypeError(python_binary_path,)
self._python_binary_path = python_binary_path
@property
def server_idle_suicide_seconds(self):
if self._server_idle_suicide_seconds is None:
return YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS
return self._server_idle_suicide_seconds
@server_idle_suicide_seconds.setter
def server_idle_suicide_seconds(self, server_idle_suicide_seconds):
if server_idle_suicide_seconds is not None and \
not isinstance(server_idle_suicide_seconds, int):
raise TypeError(server_idle_suicide_seconds,)
self._server_idle_suicide_seconds = server_idle_suicide_seconds
@property
def server_check_interval_seconds(self):
if self._server_check_interval_seconds is None:
return YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS
return self._server_check_interval_seconds
@server_check_interval_seconds.setter
def server_check_interval_seconds(self, server_check_interval_seconds):
if server_check_interval_seconds is not None and \
not isinstance(server_check_interval_seconds, int):
raise TypeError(server_check_interval_seconds,)
self._server_check_interval_seconds = server_check_interval_seconds
@property
def log_level(self):
return self._log_level
@log_level.setter
def log_level(self, log_level):
if log_level is not None and not isinstance(log_level, str):
raise TypeError('log level must be a str: %r' % (log_level))
if log_level is not None and not _is_valid_log_level(log_level):
logger.warning('log level unrecognized: %r', log_level)
# but fall through and do it anyway
self._log_level = log_level
@property
def stdout_log_path(self):
return self._stdout_log_path
@stdout_log_path.setter
def stdout_log_path(self, stdout_log_path):
if stdout_log_path is not None and \
not isinstance(stdout_log_path, str):
raise TypeError(
'stdout log path must be a str: %r' % (stdout_log_path)
)
self._stdout_log_path = stdout_log_path
@property
def stderr_log_path(self):
return self._stderr_log_path
@stderr_log_path.setter
def stderr_log_path(self, stderr_log_path):
if stderr_log_path is not None and \
not isinstance(stderr_log_path, str):
raise TypeError(
'stderr_log_path must be a str: %r' % (stderr_log_path)
)
self._stderr_log_path = stderr_log_path
@property
def keep_logs(self):
if self._keep_logs is None:
return False
return self._keep_logs
@keep_logs.setter
def keep_logs(self, keep_logs):
if keep_logs is not None and not isinstance(keep_logs, bool):
raise TypeError('keep-logs must be a bool: %r' % (keep_logs))
self._keep_logs = keep_logs
@property
def ycmd_module_directory(self):
if self._ycmd_root_directory is None:
logger.error('no ycmd root directory set')
raise AttributeError
return os.path.join(self._ycmd_root_directory, 'ycmd')
def copy(self):
'''
Creates a shallow-copy of the startup parameters.
'''
raw_attrs = [
'_ycmd_root_directory',
'_ycmd_settings_path',
'_working_directory',
'_python_binary_path',
'_server_idle_suicide_seconds',
'_server_check_interval_seconds',
'_log_level',
'_stdout_log_path',
'_stderr_log_path',
'_keep_logs',
]
result = StartupParameters()
for attr in raw_attrs:
attr_value = getattr(self, attr)
setattr(result, attr, attr_value)
return result
def __iter__(self):
''' Dictionary-compatible iterator. '''
return iter((
('ycmd_root_directory', self.ycmd_root_directory),
('ycmd_settings_path', self.ycmd_settings_path),
('working_directory', self.working_directory),
('python_binary_path', self.python_binary_path),
('server_idle_suicide_seconds', self.server_idle_suicide_seconds),
(
'server_check_interval_seconds',
self.server_check_interval_seconds,
),
('ycmd_module_directory', self.ycmd_module_directory),
('log_level', self.log_level),
('stdout_log_path', self.stdout_log_path),
('stderr_log_path', self.stderr_log_path),
('keep_logs', self.keep_logs),
))
def __str__(self):
return (
'ycmd path, default settings path, '
'python binary path, working directory: '
'%(ycmd_root_directory)s, %(ycmd_settings_path)s, '
'%(python_binary_path)s, %(working_directory)s' %
(dict(self))
)
def __repr__(self):
return '%s(%r)' % (StartupParameters, dict(self))
def to_startup_parameters(ycmd_root_directory,
ycmd_settings_path=None,
working_directory=None,
python_binary_path=None,
server_idle_suicide_seconds=None,
server_check_interval_seconds=None):
'''
Internal convenience function. Receives the raw arguments to starting a
ycmd server and returns a `StartupParameters` instance from it.
If the first argument is already `StartupParameters`, it is returned as-is,
and the remaining parameters are ignored.
Otherwise, a `StartupParameters` instance is constructed with all the given
parameters and returned.
'''
if isinstance(ycmd_root_directory, StartupParameters):
# great, already in the desired state
# check if other params are provided and issue a warning
# (they get ignored in that case)
if ycmd_settings_path is not None:
logger.warning(
'ycmd settings path will be ignored: %s', ycmd_settings_path,
)
if working_directory is not None:
logger.warning(
'working directory will be ignored: %s', working_directory,
)
if python_binary_path is not None:
logger.warning(
'python binary path will be ignored: %s', python_binary_path,
)
if server_idle_suicide_seconds is not None:
logger.warning(
'server idle suicide seconds will be ignored: %s',
server_idle_suicide_seconds,
)
if server_check_interval_seconds is not None:
logger.warning(
'server check interval seconds will be ignored: %s',
server_check_interval_seconds,
)
return ycmd_root_directory
# else, generate them
logger.warning('[DEPRECATED] to startup parameters', stack_info=True)
logger.debug(
'generating startup parameters with root: %s', ycmd_root_directory,
)
return StartupParameters(
ycmd_root_directory,
ycmd_settings_path=ycmd_settings_path,
working_directory=working_directory,
python_binary_path=python_binary_path,
server_idle_suicide_seconds=server_idle_suicide_seconds,
server_check_interval_seconds=server_check_interval_seconds,
)
def check_startup_parameters(startup_parameters):
'''
Performs quick, non-blocking validation on startup parameters to catch type
mismatches or empty configurations. Raises an exception or returns `None`.
This is meant to be run on the main thread to catch common startup errors
before initializing the server off-thread. It isn't strictly necessary, but
produces nicer error messages when the plugin is not configured correctly.
NOTE : This does not check the file system for things like missing files,
as that can be a blocking operation.
'''
if not isinstance(startup_parameters, StartupParameters):
raise TypeError(
'startup parameters must be StartupParameters: %r' %
(startup_parameters)
)
ycmd_root_directory = startup_parameters.ycmd_root_directory
if not ycmd_root_directory:
raise RuntimeError('no ycmd root directory has been set')
ycmd_settings_path = startup_parameters.ycmd_settings_path
if not ycmd_settings_path:
raise RuntimeError('no ycmd default settings path has been set')
logger.debug(
'startup parameters seem to be filled in, '
'ready to attempt startup: %r', startup_parameters,
)
def write_ycmd_settings_file(ycmd_settings_path, ycmd_hmac_secret, out=None):
'''
Writes out a ycmd server settings file based on the template file
`ycmd_settings_path`. A uniquely-generated `ycmd_hmac_secret` must also be
supplied, as it needs to be written into this file.
The return value is the path to the settings file, as a `str`.
If `out` is omitted, a secure temporary file is created, and the returned
path should be passed via the options flag to ycmd.
If `out` is provided, it should be a path to an output file (`str`), or a
file-like handle (must support `.write`). This is not recommended for use
with ycmd, as it may be insecure.
'''
ycmd_settings_data = generate_settings_data(
ycmd_settings_path, ycmd_hmac_secret,
)
out_path = None
if out is None:
# no point using `with` for this, since we also use `delete=False`
temp_file_object = tempfile.NamedTemporaryFile(
prefix='ycmd_settings_', suffix='.json', delete=False,
)
temp_file_name = temp_file_object.name
temp_file_handle = temp_file_object.file # type: io.TextIOWrapper
out = temp_file_handle
out_path = temp_file_name
def flush():
temp_file_handle.flush()
def close():
temp_file_object.close()
else:
raise NotImplementedError('unimplemented: output to specific file')
if out_path is None and out is not None:
logger.error('failed to get path for output file: %r', out)
# fall through and write it out anyway
save_json_file(out, ycmd_settings_data)
flush()
close()
logger.debug('successfully wrote file: %s', out_path)
return out_path
def prepare_ycmd_process(startup_parameters, ycmd_settings_tempfile_path,
ycmd_server_hostname, ycmd_server_port):
'''
Initializes and returns a `Process` handle, correctly configured to launch
a ycmd server process. It does not automatically start it though.
The `ycmd_settings_tempfile_path` should be created by (return value of)
`write_ycmd_settings_file`. The ycmd server process will read that file on
startup and then immediately delete it.
The `ycmd_server_hostname` and `ycmd_server_port` must also be provided to
instruct the server to listen on the given address.
'''
assert isinstance(startup_parameters, StartupParameters), \
'startup parameters must be StartupParameters: %r' % \
(startup_parameters)
assert isinstance(ycmd_settings_tempfile_path, str), \
'ycmd settings temporary file path must be a str: %r' % \
(ycmd_settings_tempfile_path)
# this may throw:
check_startup_parameters(startup_parameters)
working_directory = startup_parameters.working_directory
python_binary_path = startup_parameters.python_binary_path
server_idle_suicide_seconds = \
startup_parameters.server_idle_suicide_seconds
server_check_interval_seconds = \
startup_parameters.server_check_interval_seconds
ycmd_module_directory = startup_parameters.ycmd_module_directory
if YCMD_LOG_SPOOL_OUTPUT:
stdout_log_spool = \
tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE)
stderr_log_spool = \
tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE)
logger.debug(
'using temporary spools for stdout, stderr: %r, %r',
stdout_log_spool, stderr_log_spool,
)
stdout_handle = stdout_log_spool
stderr_handle = stderr_log_spool
else:
# explicitly close handles - don't inherit from this process
stdout_handle = FileHandles.DEVNULL
stderr_handle = FileHandles.DEVNULL
ycmd_process_handle = Process()
ycmd_process_handle.binary = python_binary_path
ycmd_process_handle.args.extend([
ycmd_module_directory,
'--host=%s' % (ycmd_server_hostname),
'--port=%s' % (ycmd_server_port),
'--idle_suicide_seconds=%s' % (server_idle_suicide_seconds),
'--check_interval_seconds=%s' % (server_check_interval_seconds),
'--options_file=%s' % (ycmd_settings_tempfile_path),
])
ycmd_process_handle.cwd = working_directory
ycmd_process_handle.filehandles.stdout = stdout_handle
ycmd_process_handle.filehandles.stderr = stderr_handle
if startup_parameters.log_level is not None:
add_ycmd_debug_args(
ycmd_process_handle,
log_level=startup_parameters.log_level,
stdout_file_name=startup_parameters.stdout_log_path,
stderr_file_name=startup_parameters.stderr_log_path,
keep_logfiles=startup_parameters.keep_logs,
)
return ycmd_process_handle
def add_ycmd_debug_args(ycmd_process_handle, log_level='info',
stdout_file_name=None, stderr_file_name=None,
keep_logfiles=False):
'''
Adds startup flags to `ycmd_process_handle` to enable logging output.
The `ycmd_process_handle` should be an instance of `Process`.
The `log_level` should be one of 'debug', 'info', 'warning', 'error', or
'critical'. Any `str` is accepted, this routine does not actually check it.
If `stdout_file_name` and `stderr_file_name` are provided, the server will
write log messages to the given files. The bulk of the logs will be on
stderr, with only a few startup messages appearing on stdout.
If `keep_logfiles` is `True`, then the server won't delete the log files
when it exits. Otherwise, the log files will be deleted when it shuts down.
'''
if not isinstance(ycmd_process_handle, Process):
raise TypeError(
'ycmd process handle must be a Process: %r' % (ycmd_process_handle)
)
assert isinstance(ycmd_process_handle, Process)
if ycmd_process_handle.alive():
raise ValueError(
'ycmd process is already started, cannot modify it: %r' %
(ycmd_process_handle)
)
if not _is_valid_log_level(log_level):
logger.warning('log level unrecognized: %r', log_level)
# but fall through and do it anyway
ycmd_debug_args = [
'--log=%s' % (log_level),
]
if stdout_file_name and stderr_file_name:
ycmd_debug_args.extend([
'--stdout=%s' % (stdout_file_name),
'--stderr=%s' % (stderr_file_name),
])
if keep_logfiles:
ycmd_debug_args.append(
'--keep_logfiles',
)
logger.debug('adding ycmd debug args: %r', ycmd_debug_args)
ycmd_process_handle.args.extend(ycmd_debug_args)
def _is_valid_log_level(log_level):
if not isinstance(log_level, str):
raise TypeError('log level must be a str: %r' % (log_level))
# these can be found by running `python /path/to/ycmd/ycmd --help`
recognized_log_levels = [
'debug',
'info',
'warning',
'error',
'critical',
]
return log_level in recognized_log_levels
| [
[
[
225,
232
],
[
674,
681
]
],
[
[
240,
242
],
[
3305,
3307
],
[
7231,
7233
]
],
[
[
250,
258
],
[
13763,
13771
],
[
16058,
16066
],
[
16159,
16167
]
],
[
[
288,
299
],
[
16551,
16562
],
[
16595,
16606
]
],
[
[
305,
312
],
[
16642,
16649
],
[
18549,
18556
],
[
18717,
18724
]
],
[
[
344,
370
],
[
3755,
3781
]
],
[
[
376,
390
],
[
14429,
14443
]
],
[
[
429,
450
],
[
15994,
16015
]
],
[
[
456,
475
],
[
16097,
16116
],
[
16198,
16217
]
],
[
[
481,
523
],
[
4840,
4882
]
],
[
[
529,
569
],
[
4249,
4289
]
],
[
[
607,
632
],
[
2740,
2765
]
],
[
[
638,
660
],
[
13543,
13565
]
],
[
[
665,
671
],
[
2172,
2178
],
[
2805,
2811
],
[
5674,
5680
],
[
7140,
7146
],
[
10141,
10147
],
[
10303,
10309
],
[
10464,
10470
],
[
10636,
10642
],
[
10844,
10850
],
[
11057,
11063
],
[
11131,
11137
],
[
12690,
12696
],
[
14317,
14323
],
[
14499,
14505
],
[
16228,
16234
],
[
18954,
18960
],
[
19422,
19428
]
],
[
[
728,
745
],
[
7777,
7794
],
[
9122,
9139
],
[
9913,
9930
],
[
11239,
11256
],
[
12200,
12217
],
[
15257,
15274
]
],
[
[
9159,
9180
]
],
[
[
11572,
11596
],
[
15561,
15585
]
],
[
[
12828,
12852
]
],
[
[
14579,
14599
]
],
[
[
17637,
17656
],
[
17297,
17316
]
],
[
[
19541,
19560
],
[
5630,
5649
],
[
18914,
18933
]
]
] |
#!/usr/bin/env python
import serial
import sys
import struct
import pprint
import argparse
import code
pp = pprint.PrettyPrinter()
class ConsoleUI:
def opStart(self, name):
sys.stdout.write(name.ljust(40))
def opProgress(self, progress, total=-1):
if (total >= 0):
prstr = "0x%04x / 0x%04x" % (progress, total)
else:
prstr = "0x%04x" % (progress)
sys.stdout.write(prstr.ljust(20))
sys.stdout.write('\x08' * 20)
sys.stdout.flush()
def opEnd(self, result):
sys.stdout.write(result.ljust(20))
sys.stdout.write("\n")
class XFlash:
def __init__(self, serialport):
self.serial = serial.Serial(serialport, baudrate=115200)
def __del__(self):
try:
self.serial.close()
del self.serial
except:
pass
def cmd(self, cmd, argA=0, argB=0):
buffer = struct.pack("<LL", argA, argB)
self.serial.write(bytes([cmd]))
self.serial.write(buffer)
self.serial.flush()
def flashPowerOn(self):
self.cmd(0x10)
def flashShutdown(self):
self.cmd(0x11)
def update(self):
try:
self.cmd(0xF0)
except:
pass
def flashInit(self):
self.cmd(0x03)
buffer = self.serial.read(4)
return struct.unpack("<L", buffer)[0]
def flashDeInit(self):
self.cmd(0x04)
def flashStatus(self):
self.cmd(0x05)
buffer = self.serial.read(2)
return struct.unpack("<H", buffer)[0]
def flashErase(self, block):
self.cmd(0x06, block)
# return self.flashStatus()
def flashReadBlock(self, block):
self.cmd(0x01, block, 528 * 32)
# for i in range(0, 32):
buffer = self.serial.read(528 * 32)
status = self.flashStatus()
return (status, buffer)
def flashWriteBlock(self, block, buffer):
self.cmd(0x02, block, len(buffer))
self.serial.write(buffer)
return self.flashStatus()
# def calcecc(data):
# assert len(data) == 0x210
# val = 0
# for i in range(0x1066):
# if not i & 31:
# v = ~struct.unpack("<L", data[i/8:i/8+4])[0]
# val ^= v & 1
# v >>= 1
# if val & 1:
# val ^= 0x6954559
# val >>= 1
#
# val = ~val
# return data[:-4] + struct.pack("<L", (val << 6) & 0xFFFFFFFF)
#
# def addecc(data, block = 0, off_8 = "\x00" * 4):
# res = ""
# while len(data):
# d = (data[:0x200] + "\x00" * 0x200)[:0x200]
# data = data[0x200:]
#
# d += struct.pack("<L4B4s4s", block / 32, 0, 0xFF, 0, 0, off_8, "\0\0\0\0")
# d = calcecc(d)
# block += 1
# res += d
# return res
def main(argv):
parser = argparse.ArgumentParser(description='XBox 360 NAND Flasher')
parser.add_argument('port', metavar='port', type=str,
help='serial port for comms (e.g. COM5 or /dev/ttyUSB0)')
subparsers = parser.add_subparsers(title='Operations', dest='action')
parser_read = subparsers.add_parser('read', help='Dumps an image from the NAND')
parser_read.add_argument('file', nargs=1, type=argparse.FileType('wb'), help='The file to dump the NAND to')
parser_read.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
help='The block to start the action from')
parser_read.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
help='The count of blocks to perform the action to')
parser_write = subparsers.add_parser('write', help='Writes an image into the NAND')
parser_write.add_argument('file', nargs=1, type=argparse.FileType('rb'), help='The image file to write to the NAND')
parser_write.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
help='The block to start the action from')
parser_write.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
help='The count of blocks to perform the action to')
# parser_erase = subparsers.add_parser('erase', help='Erases blocks in the NAND')
# parser_erase.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
# help='The block to start the action from')
# parser_erase.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
# help='The count of blocks to perform the action to')
#
# parser_update = subparsers.add_parser('update',
# help='Jumps into the bootloader of the NAND Flashing device for updating the firmware')
# parser_shutdown = subparsers.add_parser('shutdown', help='Shuts down the attached XBox 360')
# parser_poweron = subparsers.add_parser('powerup', help='Powers up the attached XBox 360')
arguments = parser.parse_args(argv[1:])
ui = ConsoleUI()
xf = XFlash(arguments.port)
if arguments.action in ('erase', 'write', 'read'):
try:
flash_config = xf.flashInit()
print("FlashConfig: 0x%08x" % (flash_config))
if flash_config <= 0:
raise Exception("FlashConfig invalid!")
except Exception as e:
print("Error!", e)
xf.flashDeInit()
return 1
try:
if arguments.action == 'erase':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
ui.opStart('Erase')
ui.opProgress(0, end)
for b in range(start, end):
status = xf.flashErase(b)
ui.opProgress(b + 1, end)
ui.opEnd('0x%04x blocks OK' % (end))
if arguments.action == 'read':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
ui.opStart('Read')
ui.opProgress(0, end)
for b in range(start, end):
(status, buffer) = xf.flashReadBlock(b)
ui.opProgress(b + 1, end)
arguments.file[0].write(buffer)
if arguments.action == 'write':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
blocksize = 528 * 32
ui.opStart('Write')
ui.opProgress(0, end)
for b in range(start, end):
buffer = arguments.file[0].read(blocksize)
if len(buffer) < blocksize:
buffer += ('\xFF' * (blocksize - len(buffer)))
status = xf.flashWriteBlock(b, buffer)
ui.opProgress(b + 1, end)
#
# if arguments.action == 'update':
# xf.update()
#
# if arguments.action == 'powerup':
# xf.flashPowerOn()
#
# if arguments.action == 'shutdown':
# xf.flashShutdown()
except Exception as e:
raise e
finally:
xf.flashDeInit()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| [
[
[
29,
35
],
[
693,
699
]
],
[
[
43,
46
],
[
7333,
7336
],
[
7347,
7350
],
[
188,
191
],
[
416,
419
],
[
458,
461
],
[
496,
499
],
[
553,
556
],
[
596,
599
]
],
[
[
54,
60
],
[
924,
930
],
[
1361,
1367
],
[
1547,
1553
]
],
[
[
68,
74
],
[
109,
115
]
],
[
[
82,
90
],
[
2755,
2763
],
[
3168,
3176
],
[
3731,
3739
]
],
[
[
98,
102
]
],
[
[
104,
106
]
],
[
[
140,
149
],
[
5065,
5074
]
],
[
[
627,
633
],
[
5087,
5093
]
],
[
[
2730,
2734
],
[
7342,
7346
]
]
] |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM16.IEC61968.Common.ActivityRecord import ActivityRecord
class ComplianceEvent(ActivityRecord):
"""Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated.Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated.
"""
def __init__(self, deadline='', *args, **kw_args):
"""Initialises a new 'ComplianceEvent' instance.
@param deadline: The deadline for compliance.
"""
#: The deadline for compliance.
self.deadline = deadline
super(ComplianceEvent, self).__init__(*args, **kw_args)
_attrs = ["deadline"]
_attr_types = {"deadline": str}
_defaults = {"deadline": ''}
_enums = {}
_refs = []
_many_refs = []
| [
[
[
1149,
1163
],
[
1187,
1201
]
],
[
[
1171,
1186
],
[
2305,
2320
]
]
] |
import logging
from django.db.models.query_utils import Q
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg import openapi
from drf_yasg.openapi import Parameter
from drf_yasg.utils import no_body, swagger_auto_schema
from notifications.signals import notify
from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.decorators import parser_classes as dparser_classes
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework_extensions.mixins import DetailSerializerMixin, NestedViewSetMixin
from looking_for_group.mixins import AutoPermissionViewSetMixin, ParentObjectAutoPermissionViewSetMixin
from . import models, serializers
from .signals import player_kicked, player_left
logger = logging.getLogger("api")
parent_lookup_game__slug = Parameter(
name="parent_lookup_game__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related game object.",
)
parent_lookup_session__slug = Parameter(
name="parent_lookup_session__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related session object.",
)
parent_lookup_session__game__slug = Parameter(
name="parent_lookup_session__game__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related game object.",
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Games",
operation_description="Fetch a list of game records. **NOTE**: You will probably want to filter by status at least.",
),
)
@method_decorator(
name="create",
decorator=swagger_auto_schema(
operation_summary="Game: Create",
operation_description="Create a new game posting.",
request_body=serializers.GameDataSerializer,
responses={201: serializers.GameDataSerializer},
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game: Details",
operation_description="Fetch the details for the given game. **NOTE**: If you are not a member of the game, only a subset of the available information will be displayed.",
responses={
200: serializers.GameDataSerializer,
403: "You are not authorized to view this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game: Update",
operation_description="Update the details of this game. (Only available to GM)",
request_body=serializers.GameDataSerializer,
responses={
200: serializers.GameDataSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game: Update",
operation_description="Update the details of this game. (Only available to GM)",
request_body=serializers.GameDataSerializer,
responses={
200: serializers.GameDataSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game: Delete",
operation_description="Delete the given game. (Only available to GM.)",
request_body=no_body,
responses={204: "Game was deleted.", 403: "You are not the GM of this game."},
),
)
@method_decorator(
name="leave",
decorator=swagger_auto_schema(
operation_summary="Game: Leave",
operation_description="Leave the current game. (Players only.)",
request_body=no_body,
reponses={
204: "You have successfully left the game.",
400: "You are not a member of this game.",
403: "You are the GM and cannot leave.",
},
),
)
@method_decorator(
name="apply",
decorator=swagger_auto_schema(
operation_summary="Game: Apply",
operation_description="Apply to join this game.",
request_body=serializers.GameApplicationSerializer,
responses={
201: serializers.GameApplicationSerializer,
400: "You are already a member of this game.",
403: "You are not permitted to apply to this game either due to your access rights or the game's status.",
},
),
)
class GamePostingViewSet(
AutoPermissionViewSetMixin,
DetailSerializerMixin,
NestedViewSetMixin,
viewsets.ModelViewSet,
):
"""
A view set that allows the retrieval and manipulation of posted game data.
"""
permission_classes = (IsAuthenticated,)
parser_classes = [FormParser, MultiPartParser]
model = models.GamePosting
lookup_field = "slug"
lookup_url_kwarg = "slug"
serializer_class = serializers.GameDataListSerializer
serializer_detail_class = serializers.GameDataSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = [
"published_game",
"game_system",
"published_module",
"status",
"game_type",
"game_mode",
]
permission_type_map = {
**AutoPermissionViewSetMixin.permission_type_map,
"apply": "apply",
"leave": "leave",
}
def get_queryset(self):
gamer = self.request.user.gamerprofile
friends = gamer.friends.all()
communities = [f.id for f in gamer.communities.all()]
game_player_ids = [
obj.game.id
for obj in models.Player.objects.filter(gamer=gamer).select_related("game")
]
q_gm = Q(gm=gamer)
q_gm_is_friend = Q(gm__in=friends) & Q(privacy_level="community")
q_isplayer = Q(id__in=game_player_ids)
q_community = Q(communities__id__in=communities) & Q(privacy_level="community")
q_public = Q(privacy_level="public")
qs = models.GamePosting.objects.filter(
q_gm | q_public | q_gm_is_friend | q_isplayer | q_community
).distinct()
return qs
def create(self, request, *args, **kwargs):
self.serializer_class = serializers.GameDataSerializer
return super().create(request, *args, **kwargs)
def retrieve(self, request, *args, **kwargs):
if not request.user.has_perm("game.is_member", self.get_object()):
logger.debug(
"User is not a member of game, swtiching serializer to list view mode."
)
self.serializer_detail_class = serializers.GameDataListSerializer
return super().retrieve(request, *args, **kwargs)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def apply(self, request, *args, **kwargs):
obj = self.get_object()
logger.debug("Retrieved game object of {}".format(obj))
if request.user.has_perm("game.is_member", obj):
return Response(
data={"errors": "You are already in this game..."},
status=status.HTTP_400_BAD_REQUEST,
)
new_application = serializers.GameApplicationSerializer(
data=request.data, context={"request": request}
)
if not new_application.is_valid():
return Response(
data=new_application.errors, status=status.HTTP_400_BAD_REQUEST
)
app = models.GamePostingApplication.objects.create(
game=obj,
gamer=request.user.gamerprofile,
message=new_application.validated_data["message"],
status="pending",
)
notify.send(
request.user.gamerprofile,
recipient=obj.gm.user,
verb="submitted application",
action_object=app,
target=obj,
)
return Response(
data=serializers.GameApplicationSerializer(
app, context={"request": request}
).data,
status=status.HTTP_201_CREATED,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def leave(self, request, *args, **kwargs):
obj = self.get_object()
if request.user == obj.gm.user:
return Response(
data={"errors": "The GM cannot leave the game."},
status=status.HTTP_400_BAD_REQUEST,
)
player = models.Player.objects.get(gamer=request.user.gamerprofile, game=obj)
player_left.send(models.Player, player=player)
player.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: List Sessions",
operation_description="List the sessions for the given game.",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game Session: Details",
operation_description="Get the details for the given session. **NOTE**: If the user is just a player, the GM notes and player details will not be included.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game Session: Update",
operation_description="Update details of the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game Session: Update",
operation_description="Update details of the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game Session: Delete",
operation_description="Delete the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
204: "Session was deleted.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="cancel",
decorator=swagger_auto_schema(
operation_summary="Game Session: Cancel",
operation_description="Cancel the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is already canceled or complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="uncancel",
decorator=swagger_auto_schema(
operation_summary="Game Session: Uncancel",
operation_description="Uncancel the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is not canceled.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="complete",
decorator=swagger_auto_schema(
operation_summary="Game Session: Mark Complete",
operation_description="Mark the game session as complete.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is already canceled or complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="uncomplete",
decorator=swagger_auto_schema(
operation_summary="Game Session: Uncomplete",
operation_description="Undo the completion status of the session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session isn't marked as complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="reschedule",
decorator=swagger_auto_schema(
operation_summary="Game Session: Reschedule",
operation_description="Reschedule the game session to another date/time.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.ScheduleSerializer,
responses={
200: serializers.GameSessionGMSerializer,
400: "Your date and time were invalid or the session is already marked as complete or canceled.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="addlog",
decorator=swagger_auto_schema(
operation_summary="Game Session: Add Adventure Log",
operation_description="Add an adventure log to this session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.AdventureLogSerializer,
responses={
201: serializers.AdventureLogSerializer,
400: "This session already has an adventure log. You should update that instead.",
403: "You don't have permission to add an adventure log.",
},
),
)
class GameSessionViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
Views for seeing game session data.
"""
model = models.GameSession
serializer_class = serializers.GameSessionSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_dependent_actions = [
"create",
"retrieve",
"update",
"partial_update",
"list",
"destroy",
"reschedule",
"cancel",
"uncancel",
"addlog",
"complete",
"uncomplete",
]
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"addlog": "view",
"reschedule": "change",
"cancel": "change",
"uncancel": "change",
"complete": "change",
"uncomplete": "change",
}
permission_type_map["list"] = "view"
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
def get_queryset(self):
return self.model.objects.filter(
game__slug=self.kwargs["parent_lookup_game__slug"]
).order_by("-scheduled_time")
def dispatch(self, request, *args, **kwargs):
if (
request.user.is_authenticated
and request.user.gamerprofile == self.get_parent_game().gm
):
self.serializer_class = serializers.GameSessionGMSerializer
return super().dispatch(request, *args, **kwargs)
@action(methods=["post"], detail=True)
def reschedule(self, request, *args, **kwargs):
date_serializer = serializers.ScheduleSerializer(data=request.data)
if not date_serializer.is_valid():
return Response(
data=date_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={
"errors": "This session is already marked as {} and cannot be rescheduled.".format(
obj.get_status_display()
)
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.move(date_serializer.validated_data["new_scheduled_time"])
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def complete(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={
"errors": "This object is either already completed or canceled and cannot be toggled to complete."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.status = "complete"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def uncomplete(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status != "complete":
return Response(
data={
"errors": "This object is not completed and so completion cannot be undone."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def cancel(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={"errors": "This session is already completed or canceled."},
status=status.HTTP_400_BAD_REQUEST,
)
obj.cancel()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def uncancel(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status != "cancel":
return Response(
data={
"errors": "This session is not canceled and can't be changed this way."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.uncancel()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def addlog(self, request, *args, **kwargs):
"""
Create the adventure log for this session.
"""
session = self.get_object()
if hasattr(session, "adventurelog"):
return Response(
data={"errors": "This session already has an adventure log."},
status=status.HTTP_400_BAD_REQUEST,
)
log_serializer = serializers.AdventureLogSerializer(
session=session, data=request.data, context={"request": request}
)
if not log_serializer.is_valid():
return Response(
data=log_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
new_log = log_serializer.save()
return Response(
data=serializers.AdventureLogSerializer(
new_log, context={"request": request}
).data,
status=status.HTTP_201_CREATED,
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Details",
operation_description="Fetch the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
responses={
200: serializers.AdventureLogSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Update",
operation_description="Update the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=serializers.AdventureLogSerializer,
responses={
200: serializers.AdventureLogSerializer,
403: "You don't have permissions to edit this adventure log.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Update",
operation_description="Update the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=serializers.AdventureLogSerializer,
responses={
200: serializers.AdventureLogSerializer,
403: "You don't have permissions to edit this adventure log.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Delete",
operation_description="Delete a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=no_body,
responses={
204: "The adventure log was successfully deleted.",
403: "You don't have permissions to edit this adventure log.",
},
),
)
class AdventureLogViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
Allows the manipulation of view sets.
"""
model = models.AdventureLog
parent_lookup_field = "session__game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_session__game__slug"
serializer_class = serializers.AdventureLogSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
permission_required = "game.is_member"
permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map}
permission_type_map["list"] = "add"
parent_dependent_actions = [
"create",
"retrieve",
"update",
"partial_update",
"destroy",
]
def get_queryset(self):
return models.AdventureLog.objects.filter(
session__slug=self.kwargs["parent_lookup_session__slug"]
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Your Game Applications",
operation_description="Fetch a list of all your game applications.",
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Details",
operation_description="Fetch the details of your game application.",
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Update",
operation_description="Update the details of your game application.",
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Update",
operation_description="Update the details of your game application.",
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Withdraw",
operation_description="Withdraw your game application by deleting the record.",
),
)
class GameApplicationViewSet(
AutoPermissionViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
View for an applicant to review, create, update, and delete their applications to games.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.GameApplicationSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
lookup_field = "slug"
lookup_url_kwarg = "slug"
permission_type_map = {**AutoPermissionViewSetMixin.permission_type_map}
def get_queryset(self):
logger.debug("Fetching gamerprofile from request...")
gamer = self.request.user.gamerprofile
logger.debug("Fetching game applications for gamer {}".format(gamer))
qs = models.GamePostingApplication.objects.filter(
gamer=self.request.user.gamerprofile
).order_by("-modified", "-created", "status")
logger.debug(
"Retrieved queryset of length {} for gamer {}".format(
qs.count(), self.request.user.gamerprofile
)
)
return qs
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Applicants for Game",
operation_description="List the applicants for the current game. (GM Only)",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Details",
operation_description="Fetch details for a given game application. (GM Only)",
manual_parameters=[parent_lookup_game__slug],
reponses={
200: serializers.GameApplicationGMSerializer,
403: "You are not the GM for this game.",
},
),
)
@method_decorator(
name="approve",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Approve",
operation_description="Approve the game applicant and add as a player to game.",
request_body=no_body,
responses={
201: serializers.PlayerSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="reject",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Reject",
operation_description="Reject the game applicant.",
request_body=no_body,
responses={
200: serializers.GameApplicationGMSerializer,
403: "You are not the GM of this game.",
},
),
)
class GMGameApplicationViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
"""
View for a GM to review and approve applicants.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.GameApplicationGMSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_lookup_field = "game"
parent_object_lookup_field = "slug"
parent_object_model = models.GamePosting
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_dependent_actions = ["list", "retrieve", "approve", "reject"]
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"approve": "approve",
"reject": "approve",
}
permission_type_map["retrieve"] = "approve"
permission_type_map["list"] = "approve"
def get_queryset(self):
return models.GamePostingApplication.objects.filter(
game__slug=self.kwargs["parent_lookup_game__slug"]
).exclude(status="new")
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
@action(methods=["post"], detail=True)
def approve(self, request, *args, **kwargs):
"""
Approves the game application.
"""
obj = self.get_object()
obj.status = "approve"
player = models.Player.objects.create(game=obj.game, gamer=obj.gamer)
obj.save()
return Response(
data=serializers.PlayerSerializer(
player, context={"request", request}
).data,
status=status.HTTP_201_CREATED,
)
@action(methods=["post"], detail=True)
def reject(self, request, *args, **kwargs):
"""
Rejects the game application.
"""
obj = self.get_object()
obj.status = "deny"
obj.save()
notify.send(
obj,
recipient=obj.gamer.user,
verb="Your player application was not accepted",
action_object=obj,
target=obj.game,
)
return Response(
data=serializers.GameApplicationSerializer(
obj, context={"request": request}
).data,
status=status.HTTP_200_OK,
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: Player List",
operation_description="List players for a given game",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Player: Details",
operation_description="Details for a player record in a given game.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.PlayerSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="kick",
decorator=swagger_auto_schema(
operation_summary="Player: Kick from game",
operation_description="Kick the player out of the game.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
204: "Player was removed from the game.",
403: "You are not the GM of this game.",
},
),
)
class PlayerViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
"""
Provides views for players in a given game.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.PlayerSerializer
permission_required = "game.is_member"
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_dependent_actions = ["list", "retrieve"]
permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map}
permission_type_map["list"] = "view"
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
def get_queryset(self):
return models.Player.objects.filter(game=self.get_parent_game())
@action(methods=["post"], detail=True)
def kick(self, request, *args, **kwargs):
obj = self.get_object()
player_kicked.send(request.user, player=obj)
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: List Characters",
operation_description="Fetch the list of characters for a given game.",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game: Character Details",
operation_description="Fetch the details of a character for a given game.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.CharacterSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game: Update Character Details",
operation_description="Update the character for the given game.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.CharacterSerializer,
responses={
200: serializers.CharacterSerializer,
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game: Update Character Details",
operation_description="Update the character for the given game.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.CharacterSerializer,
responses={
200: serializers.CharacterSerializer,
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="deactivate",
decorator=swagger_auto_schema(
operation_summary="Game: Deactivate Character",
operation_description="Mark the character as inactive.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already inactive.",
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="reactivate",
decorator=swagger_auto_schema(
operation_summary="Game: Reactivate Character",
operation_description="Mark the character as active.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already active.",
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game: Delete Character",
operation_description="Delete the character.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
204: "Character was deleted.",
403: "You are not the owner of this character.",
},
),
)
@method_decorator(
name="approve",
decorator=swagger_auto_schema(
operation_summary="Game: Approve Character",
operation_description="Mark the character as approved (GM Only).",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already approved.",
403: "You are not the GM of the game.",
},
),
)
@method_decorator(
name="reject",
decorator=swagger_auto_schema(
operation_summary="Game: Reject Character",
operation_description="Mark the character as rejected (GM Only).",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already rejected.",
403: "You are not the GM of the game.",
},
),
)
class CharacterViewSet(
ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, viewsets.ModelViewSet
):
"""
Provides views for the characters in a game.
"""
permission_classes = (IsAuthenticated,)
parser_classes = [FormParser, MultiPartParser]
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_dependent_actions = ["create", "list", "retrieve"]
serializer_class = serializers.CharacterSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
parent_game = None
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"approve": "approve",
"reject": "approve",
"deactivate": "delete",
"reactivate": "delete",
}
permission_type_map["list"] = "gamelist"
def get_parent_game(self):
if not self.parent_game:
self.parent_game = get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
return self.parent_game
def get_queryset(self):
return models.Character.objects.filter(game=self.get_parent_game())
def create(self, request, *args, **kwargs):
if request.user.gamerprofile == self.get_parent_game().gm:
return Response(
data={"errors": "Only a player can create a character."},
status=status.HTTP_403_FORBIDDEN,
)
char_ser = serializers.CharacterSerializer(
data=request.data,
context={"request": request, "game": self.get_parent_game()},
)
if not char_ser.is_valid():
return Response(data=char_ser.errors, status=status.HTTP_400_BAD_REQUEST)
char_ser.save()
return Response(data=char_ser.data, status=status.HTTP_201_CREATED)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def approve(self, request, *args, **kwargs):
"""
Approves the proposed character.
"""
obj = self.get_object()
obj.status = "approved"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reject(self, request, *args, **kwargs):
"""
Rejects the proposed character.
"""
obj = self.get_object()
obj.status = "rejected"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def deactivate(self, request, *args, **kwargs):
"""
Make a character inactive.
"""
obj = self.get_object()
obj.status = "inactive"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reactivate(self, request, *args, **kwargs):
"""
Reactivate an inactive character.
"""
obj = self.get_object()
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Your Characters",
operation_description="Fetch a list of all of your characters.",
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Your Character: Details",
operation_description="Fetch the details of your character.",
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Your Character: Update",
operation_description="Update the details of your character.",
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Your Character: Update",
operation_description="Update the details of your character.",
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Your Character: Delete",
operation_description="Delete your character.",
request_body=no_body,
responses={204: "Character was deleted."},
),
)
@method_decorator(
name="deactivate",
decorator=swagger_auto_schema(
operation_summary="Your Character: Deactivate",
operation_description="Mark your character as inactive.",
request_body=no_body,
responses={
200: "Character was marked as inactive.",
400: "Character was already inactive.",
},
),
)
@method_decorator(
name="reactivate",
decorator=swagger_auto_schema(
operation_summary="Your Character: Reactivate",
operation_description="Mark your character as active.",
request_body=no_body,
responses={
200: "Character was marked as active.",
400: "Character was already active.",
},
),
)
class MyCharacterViewSet(
AutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
Provides a vew so that players can view all their characters in one place.
"""
serializer_class = serializers.CharacterSerializer
permission_classes = (IsAuthenticated,)
lookup_field = "slug"
lookup_url_kwarg = "slug"
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
permission_type_map = {
**AutoPermissionViewSetMixin.permission_type_map,
"deactivate": "delete",
"reactivate": "delete",
}
permission_type_map["retrieve"] = "delete"
parser_classes = [FormParser, MultiPartParser]
def get_queryset(self):
return models.Character.objects.filter(
player__gamer=self.request.user.gamerprofile
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def deactivate(self, request, *args, **kwargs):
"""
Make a character inactive.
"""
obj = self.get_object()
obj.status = "inactive"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reactivate(self, request, *args, **kwargs):
"""
Reactivate an inactive character.
"""
obj = self.get_object()
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@dparser_classes([FormParser, JSONParser])
def destroy(self, request, *args, **kwargs):
self.parser_classes = [FormParser, JSONParser]
return super().destroy(request, *args, **kwargs)
| [
[
[
7,
14
],
[
1015,
1022
]
],
[
[
57,
58
],
[
5921,
5922
],
[
5958,
5959
],
[
5978,
5979
],
[
6028,
6029
],
[
6076,
6077
],
[
6113,
6114
],
[
6161,
6162
]
],
[
[
88,
105
],
[
15326,
15343
],
[
28403,
28420
],
[
31567,
31584
],
[
37176,
37193
]
],
[
[
142,
158
],
[
1647,
1663
],
[
1893,
1909
],
[
2187,
2203
],
[
2636,
2652
],
[
3035,
3051
],
[
3442,
3458
],
[
3764,
3780
],
[
4184,
4200
],
[
8882,
8898
],
[
9136,
9152
],
[
9631,
9647
],
[
10082,
10098
],
[
10541,
10557
],
[
10969,
10985
],
[
11447,
11463
],
[
11915,
11931
],
[
12412,
12428
],
[
12909,
12925
],
[
13487,
13503
],
[
20194,
20210
],
[
20674,
20690
],
[
21229,
21245
],
[
21792,
21808
],
[
23404,
23420
],
[
23618,
23634
],
[
23839,
23855
],
[
24058,
24074
],
[
24285,
24301
],
[
25724,
25740
],
[
25997,
26013
],
[
26417,
26433
],
[
26803,
26819
],
[
29667,
29683
],
[
29911,
29927
],
[
30305,
30321
],
[
32033,
32049
],
[
32298,
32314
],
[
32709,
32725
],
[
33197,
33213
],
[
33693,
33709
],
[
34204,
34220
],
[
34711,
34727
],
[
35120,
35136
],
[
35604,
35620
],
[
39844,
39860
],
[
40047,
40063
],
[
40254,
40270
],
[
40459,
40475
],
[
40672,
40688
],
[
40944,
40960
],
[
41319,
41335
]
],
[
[
201,
220
],
[
5247,
5266
],
[
24965,
24984
],
[
27557,
27576
],
[
36730,
36749
],
[
42192,
42211
]
],
[
[
242,
249
],
[
1162,
1169
],
[
1360,
1367
],
[
1573,
1580
]
],
[
[
279,
288
],
[
1068,
1077
],
[
1263,
1272
],
[
1470,
1479
]
],
[
[
316,
323
],
[
3658,
3665
],
[
3970,
3977
],
[
11224,
11231
],
[
11708,
11715
],
[
12189,
12196
],
[
12693,
12700
],
[
22127,
22134
],
[
26653,
26660
],
[
27008,
27015
],
[
30568,
30575
],
[
33965,
33972
],
[
34474,
34481
],
[
34966,
34973
],
[
35396,
35403
],
[
35878,
35885
],
[
40874,
40881
],
[
41163,
41170
],
[
41536,
41543
]
],
[
[
325,
344
],
[
1696,
1715
],
[
1944,
1963
],
[
2240,
2259
],
[
2687,
2706
],
[
3094,
3113
],
[
3494,
3513
],
[
3814,
3833
],
[
4234,
4253
],
[
8931,
8950
],
[
9189,
9208
],
[
9682,
9701
],
[
10141,
10160
],
[
10593,
10612
],
[
11020,
11039
],
[
11500,
11519
],
[
11968,
11987
],
[
12467,
12486
],
[
12964,
12983
],
[
13538,
13557
],
[
20247,
20266
],
[
20725,
20744
],
[
21288,
21307
],
[
21844,
21863
],
[
23453,
23472
],
[
23671,
23690
],
[
23890,
23909
],
[
24117,
24136
],
[
24337,
24356
],
[
25773,
25792
],
[
26050,
26069
],
[
26469,
26488
],
[
26854,
26873
],
[
29716,
29735
],
[
29964,
29983
],
[
30354,
30373
],
[
32082,
32101
],
[
32351,
32370
],
[
32760,
32779
],
[
33256,
33275
],
[
33748,
33767
],
[
34259,
34278
],
[
34763,
34782
],
[
35172,
35191
],
[
35655,
35674
],
[
39893,
39912
],
[
40100,
40119
],
[
40305,
40324
],
[
40518,
40537
],
[
40724,
40743
],
[
40999,
41018
],
[
41374,
41393
]
],
[
[
379,
385
],
[
7891,
7897
],
[
29265,
29271
]
],
[
[
413,
419
],
[
14159,
14165
],
[
14186,
14192
],
[
14217,
14223
],
[
14246,
14252
],
[
22414,
22420
],
[
22445,
22451
],
[
22474,
22480
],
[
24582,
24588
],
[
24609,
24615
],
[
24640,
24646
],
[
24669,
24675
],
[
27272,
27278
],
[
27299,
27305
],
[
30817,
30823
],
[
30844,
30850
],
[
41773,
41779
],
[
41800,
41806
],
[
41831,
41837
],
[
41860,
41866
]
],
[
[
421,
427
],
[
7309,
7315
],
[
7611,
7617
],
[
8255,
8261
],
[
8612,
8618
],
[
8851,
8857
],
[
16218,
16224
],
[
16610,
16616
],
[
16848,
16854
],
[
17266,
17272
],
[
17484,
17490
],
[
17870,
17876
],
[
18087,
18093
],
[
18425,
18431
],
[
18613,
18619
],
[
18990,
18996
],
[
19180,
19186
],
[
19589,
19595
],
[
19903,
19909
],
[
20156,
20162
],
[
28989,
28995
],
[
29634,
29640
],
[
32002,
32008
],
[
37669,
37675
],
[
37970,
37976
],
[
38074,
38080
],
[
38505,
38511
],
[
38939,
38945
],
[
39372,
39378
],
[
39811,
39817
],
[
43048,
43054
],
[
43487,
43493
]
],
[
[
429,
437
],
[
4801,
4809
],
[
14276,
14284
],
[
22504,
22512
],
[
24699,
24707
],
[
27330,
27338
],
[
30875,
30883
],
[
36173,
36181
],
[
41890,
41898
]
],
[
[
476,
482
],
[
6910,
6916
],
[
8296,
8302
],
[
15928,
15934
],
[
16884,
16890
],
[
17520,
17526
],
[
18123,
18129
],
[
18649,
18655
],
[
19216,
19222
],
[
28515,
28521
],
[
29030,
29036
],
[
31781,
31787
],
[
38105,
38111
],
[
38541,
38547
],
[
38975,
38981
],
[
39408,
39414
],
[
42651,
42657
],
[
43084,
43090
]
],
[
[
521,
554
],
[
43523,
43538
]
],
[
[
590,
600
],
[
4989,
4999
],
[
6964,
6974
],
[
8350,
8360
],
[
36330,
36340
],
[
38159,
38169
],
[
38595,
38605
],
[
39029,
39039
],
[
39462,
39472
],
[
42472,
42482
],
[
42705,
42715
],
[
43138,
43148
],
[
43540,
43550
],
[
43645,
43655
]
],
[
[
602,
612
],
[
6976,
6986
],
[
8362,
8372
],
[
38171,
38181
],
[
38607,
38617
],
[
39041,
39051
],
[
39474,
39484
],
[
42717,
42727
],
[
43150,
43160
],
[
43552,
43562
],
[
43657,
43667
]
],
[
[
614,
629
],
[
5001,
5016
],
[
36342,
36357
],
[
42484,
42499
]
],
[
[
669,
684
],
[
4949,
4964
],
[
24863,
24878
],
[
27453,
27468
],
[
30994,
31009
],
[
36290,
36305
],
[
42095,
42110
]
],
[
[
721,
729
],
[
7208,
7216
],
[
7549,
7557
],
[
8100,
8108
],
[
8513,
8521
],
[
8835,
8843
],
[
16156,
16164
],
[
16360,
16368
],
[
16739,
16747
],
[
17072,
17080
],
[
17375,
17383
],
[
17698,
17706
],
[
17978,
17986
],
[
18309,
18317
],
[
18504,
18512
],
[
18823,
18831
],
[
19071,
19079
],
[
19477,
19485
],
[
19842,
19850
],
[
20000,
20008
],
[
28840,
28848
],
[
29479,
29487
],
[
31986,
31994
],
[
37562,
37570
],
[
37932,
37940
],
[
38038,
38046
],
[
38396,
38404
],
[
38830,
38838
],
[
39263,
39271
],
[
39702,
39710
],
[
42939,
42947
],
[
43378,
43386
]
],
[
[
775,
796
],
[
4750,
4771
]
],
[
[
798,
816
],
[
4777,
4795
],
[
14135,
14153
],
[
22390,
22408
],
[
27248,
27266
],
[
30793,
30811
],
[
36153,
36171
],
[
41749,
41767
]
],
[
[
855,
881
],
[
4718,
4744
],
[
5474,
5500
],
[
24550,
24576
],
[
25105,
25131
],
[
41717,
41743
],
[
42285,
42311
]
],
[
[
883,
921
],
[
14091,
14129
],
[
14994,
15032
],
[
22346,
22384
],
[
23002,
23040
],
[
27204,
27242
],
[
27954,
27992
],
[
30749,
30787
],
[
31419,
31457
],
[
36113,
36151
],
[
36846,
36884
]
],
[
[
937,
943
],
[
5030,
5036
],
[
14373,
14379
],
[
14840,
14846
],
[
22603,
22609
],
[
22691,
22697
],
[
27767,
27773
],
[
31222,
31228
],
[
36515,
36521
],
[
5831,
5837
],
[
6200,
6206
],
[
7667,
7673
],
[
8672,
8678
],
[
8766,
8772
],
[
15357,
15363
],
[
23286,
23292
],
[
25382,
25388
],
[
28215,
28221
],
[
28434,
28440
],
[
28745,
28751
],
[
31598,
31604
],
[
31717,
31723
],
[
37211,
37217
],
[
37366,
37372
],
[
42545,
42551
]
],
[
[
945,
956
],
[
2088,
2099
],
[
2144,
2155
],
[
2521,
2532
],
[
2860,
2871
],
[
2929,
2940
],
[
3267,
3278
],
[
3336,
3347
],
[
4375,
4386
],
[
4451,
4462
],
[
5128,
5139
],
[
5193,
5204
],
[
9518,
9529
],
[
9897,
9908
],
[
9971,
9982
],
[
10356,
10367
],
[
10430,
10441
],
[
10797,
10808
],
[
11270,
11281
],
[
11754,
11765
],
[
12235,
12246
],
[
12739,
12750
],
[
13197,
13208
],
[
13266,
13277
],
[
13766,
13777
],
[
13839,
13850
],
[
14415,
14426
],
[
20562,
20573
],
[
21024,
21035
],
[
21097,
21108
],
[
21587,
21598
],
[
21660,
21671
],
[
22839,
22850
],
[
24904,
24915
],
[
26301,
26312
],
[
26699,
26710
],
[
27054,
27065
],
[
27494,
27505
],
[
30199,
30210
],
[
31035,
31046
],
[
32600,
32611
],
[
32990,
33001
],
[
33060,
33071
],
[
33486,
33497
],
[
33556,
33567
],
[
34011,
34022
],
[
34520,
34531
],
[
35442,
35453
],
[
35924,
35935
],
[
36619,
36630
],
[
42037,
42048
],
[
6427,
6438
],
[
6811,
6822
],
[
7378,
7389
],
[
8127,
8138
],
[
15828,
15839
],
[
16044,
16055
],
[
19658,
19669
],
[
20027,
20038
],
[
28867,
28878
],
[
29506,
29517
],
[
37729,
37740
]
],
[
[
978,
991
],
[
31905,
31918
]
],
[
[
993,
1004
],
[
8749,
8760
]
],
[
[
1006,
1012
],
[
6652,
6658
],
[
7076,
7082
],
[
25190,
25196
],
[
25299,
25305
],
[
25539,
25545
]
],
[
[
1041,
1065
],
[
9099,
9123
],
[
9454,
9478
],
[
9849,
9873
],
[
10308,
10332
],
[
10749,
10773
],
[
11176,
11200
],
[
11660,
11684
],
[
12141,
12165
],
[
12645,
12669
],
[
13149,
13173
],
[
13718,
13742
],
[
25960,
25984
],
[
26238,
26262
],
[
29874,
29898
],
[
30135,
30159
],
[
30520,
30544
],
[
32261,
32285
],
[
32536,
32560
],
[
32942,
32966
],
[
33438,
33462
],
[
33917,
33941
],
[
34426,
34450
],
[
34918,
34942
],
[
35348,
35372
],
[
35830,
35854
]
],
[
[
1233,
1260
],
[
20485,
20512
],
[
20963,
20990
],
[
21526,
21553
],
[
22066,
22093
]
],
[
[
1434,
1467
],
[
20438,
20471
],
[
20916,
20949
],
[
21479,
21512
],
[
22019,
22052
]
],
[
[
4694,
4712
]
],
[
[
14067,
14085
]
],
[
[
22321,
22340
]
],
[
[
24522,
24544
]
],
[
[
27174,
27198
]
],
[
[
30730,
30743
]
],
[
[
36091,
36107
]
],
[
[
41693,
41711
]
]
] |
"""Mypy style test cases for SQLAlchemy stubs and plugin."""
import os
import os.path
import sys
import pytest # type: ignore # no pytest in typeshed
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal
from mypy.util import try_find_python2_interpreter
from mypy import api
this_file_dir = os.path.dirname(os.path.realpath(__file__))
prefix = os.path.dirname(this_file_dir)
inipath = os.path.abspath(os.path.join(prefix, 'test'))
# Locations of test data files such as test case descriptions (.test).
test_data_prefix = os.path.join(prefix, 'test', 'test-data')
class SQLDataSuite(DataSuite):
files = ['sqlalchemy-basics.test',
'sqlalchemy-sql-elements.test',
'sqlalchemy-sql-sqltypes.test',
'sqlalchemy-sql-selectable.test',
'sqlalchemy-sql-schema.test',
'sqlalchemy-plugin-features.test',
'sqlalchemy-plugin-query.test']
data_prefix = test_data_prefix
def run_case(self, testcase: DataDrivenTestCase) -> None:
assert testcase.old_cwd is not None, "test was not properly set up"
mypy_cmdline = [
'--show-traceback',
'--no-silence-site-packages',
'--config-file={}/sqlalchemy.ini'.format(inipath),
]
py2 = testcase.name.lower().endswith('python2')
if py2:
if try_find_python2_interpreter() is None:
pytest.skip()
return
mypy_cmdline.append('--py2')
else:
mypy_cmdline.append('--python-version={}'.format('.'.join(map(str,
sys.version_info[:2]))))
# Write the program to a file.
program_path = os.path.join(test_temp_dir, 'main.py')
mypy_cmdline.append(program_path)
with open(program_path, 'w') as file:
for s in testcase.input:
file.write('{}\n'.format(s))
output = []
# Type check the program.
out, err, returncode = api.run(mypy_cmdline)
# split lines, remove newlines, and remove directory of test case
for line in (out + err).splitlines():
if line.startswith(test_temp_dir + os.sep):
output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n").replace('.py',
''))
else:
output.append(line.rstrip("\r\n"))
# Remove temp file.
os.remove(program_path)
assert_string_arrays_equal(testcase.output, output,
'Invalid output ({}, line {})'.format(
testcase.file, testcase.line))
| [
[
[
69,
71
]
],
[
[
79,
86
],
[
401,
403
],
[
417,
419
],
[
454,
456
],
[
495,
497
],
[
511,
513
],
[
632,
634
],
[
1842,
1844
],
[
2325,
2327
],
[
2389,
2391
],
[
2627,
2629
]
],
[
[
94,
97
],
[
1754,
1757
]
],
[
[
106,
112
],
[
1509,
1515
]
],
[
[
184,
197
],
[
1855,
1868
],
[
2309,
2322
],
[
2373,
2386
]
],
[
[
225,
243
],
[
1088,
1106
]
],
[
[
245,
254
],
[
695,
704
]
],
[
[
285,
311
],
[
2659,
2685
]
],
[
[
334,
362
],
[
1453,
1481
]
],
[
[
380,
383
],
[
2136,
2139
]
],
[
[
385,
398
],
[
470,
483
]
],
[
[
445,
451
],
[
524,
530
],
[
645,
651
]
],
[
[
485,
492
],
[
1346,
1353
]
],
[
[
613,
629
],
[
1037,
1053
]
],
[
[
682,
694
]
]
] |
#!/usr/bin/env python
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generates the text to paste into the email for announcing a new
# release of Gerrit. The text is generated based on a template that
# is filled with values either passed to the script or calculated
# at runtime.
#
# The script outputs a plain text file with the announcement text:
#
# release-announcement-gerrit-X.Y.txt
#
# and, if GPG is available, the announcement text wrapped with a
# signature:
#
# release-announcement-gerrit-X.Y.txt.asc
#
# Usage:
#
# ./tools/release-announcement.py -v 2.14.2 -p 2.14.1 \
# -s "This release fixes several bugs since 2.14.1"
#
# Parameters:
#
# --version (-v): The version of Gerrit being released.
#
# --previous (-p): The previous version of Gerrit. Optional. If
# specified, the generated text includes a link to the gitiles
# log of commits between the previous and new versions.
#
# --summary (-s): Short summary of the release. Optional. When
# specified, the summary is inserted in the introductory sentence
# of the generated text.
#
# Prerequisites:
#
# - The Jinja2 python library [1] must be installed.
#
# - For GPG signing to work, the python-gnupg library [2] must be
# installed, and the ~/.gnupg folder must exist.
#
# - The war file must have been installed to the local Maven repository
# using the `./tools/mvn/api.sh war_install` command.
#
# [1] http://jinja.pocoo.org/
# [2] http://pythonhosted.org/gnupg/
from __future__ import print_function
import argparse
import hashlib
import os
import sys
from gnupg import GPG
from jinja2 import Template
class Version:
def __init__(self, version):
self.version = version
parts = version.split('.')
if len(parts) > 2:
self.major = ".".join(parts[:2])
self.patch = version
else:
self.major = version
self.patch = None
def __str__(self):
return self.version
def _main():
descr = 'Generate Gerrit release announcement email text'
parser = argparse.ArgumentParser(
description=descr,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-v', '--version', dest='version',
required=True,
help='gerrit version to release')
parser.add_argument('-p', '--previous', dest='previous',
help='previous gerrit version (optional)')
parser.add_argument('-s', '--summary', dest='summary',
help='summary of the release content (optional)')
options = parser.parse_args()
summary = options.summary
if summary and not summary.endswith("."):
summary = summary + "."
data = {
"version": Version(options.version),
"previous": options.previous,
"summary": summary
}
war = os.path.join(
os.path.expanduser("~/.m2/repository/com/google/gerrit/gerrit-war/"),
"%(version)s/gerrit-war-%(version)s.war" % data)
if not os.path.isfile(war):
print("Could not find war file for Gerrit %s in local Maven repository"
% data["version"], file=sys.stderr)
sys.exit(1)
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()
BUF_SIZE = 65536 # Read data in 64kb chunks
with open(war, 'rb') as f:
while True:
d = f.read(BUF_SIZE)
if not d:
break
md5.update(d)
sha1.update(d)
sha256.update(d)
data["sha1"] = sha1.hexdigest()
data["sha256"] = sha256.hexdigest()
data["md5"] = md5.hexdigest()
template = Template(open("tools/release-announcement-template.txt").read())
output = template.render(data=data)
filename = "release-announcement-gerrit-%s.txt" % data["version"]
with open(filename, "w") as f:
f.write(output)
gpghome = os.path.abspath(os.path.expanduser("~/.gnupg"))
if not os.path.isdir(gpghome):
print("Skipping signing due to missing gnupg home folder")
else:
try:
gpg = GPG(homedir=gpghome)
except TypeError:
gpg = GPG(gnupghome=gpghome)
signed = gpg.sign(output)
filename = filename + ".asc"
with open(filename, "w") as f:
f.write(str(signed))
if __name__ == "__main__":
_main()
| [
[
[
2044,
2058
]
],
[
[
2066,
2074
],
[
2601,
2609
],
[
2677,
2685
]
],
[
[
2082,
2089
],
[
3763,
3770
],
[
3788,
3795
],
[
3816,
3823
]
],
[
[
2097,
2099
],
[
3421,
3423
],
[
3443,
3445
],
[
3581,
3583
],
[
4469,
4471
],
[
4485,
4487
],
[
4528,
4530
]
],
[
[
2107,
2110
],
[
3720,
3723
],
[
3740,
3743
]
],
[
[
2129,
2132
],
[
4660,
4663
],
[
4725,
4728
]
],
[
[
2152,
2160
],
[
4219,
4227
]
],
[
[
2169,
2176
],
[
3311,
3318
]
],
[
[
2517,
2522
],
[
4924,
4929
]
]
] |
# coding: utf-8
"""
An API to insert and retrieve metadata on cloud artifacts.
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApiArtifact(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'checksum': 'str',
'id': 'str',
'names': 'list[str]'
}
attribute_map = {
'name': 'name',
'checksum': 'checksum',
'id': 'id',
'names': 'names'
}
def __init__(self, name=None, checksum=None, id=None, names=None): # noqa: E501
"""ApiArtifact - a model defined in Swagger""" # noqa: E501
self._name = None
self._checksum = None
self._id = None
self._names = None
self.discriminator = None
if name is not None:
self.name = name
if checksum is not None:
self.checksum = checksum
if id is not None:
self.id = id
if names is not None:
self.names = names
@property
def name(self):
"""Gets the name of this ApiArtifact. # noqa: E501
Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto. # noqa: E501
:return: The name of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ApiArtifact.
Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto. # noqa: E501
:param name: The name of this ApiArtifact. # noqa: E501
:type: str
"""
self._name = name
@property
def checksum(self):
"""Gets the checksum of this ApiArtifact. # noqa: E501
Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container. # noqa: E501
:return: The checksum of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._checksum
@checksum.setter
def checksum(self, checksum):
"""Sets the checksum of this ApiArtifact.
Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container. # noqa: E501
:param checksum: The checksum of this ApiArtifact. # noqa: E501
:type: str
"""
self._checksum = checksum
@property
def id(self):
"""Gets the id of this ApiArtifact. # noqa: E501
:return: The id of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ApiArtifact.
:param id: The id of this ApiArtifact. # noqa: E501
:type: str
"""
self._id = id
@property
def names(self):
"""Gets the names of this ApiArtifact. # noqa: E501
Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image. # noqa: E501
:return: The names of this ApiArtifact. # noqa: E501
:rtype: list[str]
"""
return self._names
@names.setter
def names(self, names):
"""Sets the names of this ApiArtifact.
Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image. # noqa: E501
:param names: The names of this ApiArtifact. # noqa: E501
:type: list[str]
"""
self._names = names
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApiArtifact, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiArtifact):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
[
[
328,
334
],
[
6261,
6267
]
],
[
[
342,
344
]
],
[
[
367,
370
],
[
5335,
5338
]
],
[
[
379,
390
],
[
6039,
6050
],
[
6501,
6512
]
]
] |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import re
import os
import tempfile
import six
from .. import environment
from ..console import log
from .. import util
WIN = (os.name == "nt")
def _find_conda():
"""Find the conda executable robustly across conda versions.
Returns
-------
conda : str
Path to the conda executable.
Raises
------
IOError
If the executable cannot be found in either the CONDA_EXE environment
variable or in the PATH.
Notes
-----
In POSIX platforms in conda >= 4.4, conda can be set up as a bash function
rather than an executable. (This is to enable the syntax
``conda activate env-name``.) In this case, the environment variable
``CONDA_EXE`` contains the path to the conda executable. In other cases,
we use standard search for the appropriate name in the PATH.
See https://github.com/airspeed-velocity/asv/issues/645 for more details.
"""
if 'CONDA_EXE' in os.environ:
conda = os.environ['CONDA_EXE']
else:
conda = util.which('conda')
return conda
class Conda(environment.Environment):
"""
Manage an environment using conda.
Dependencies are installed using ``conda``. The benchmarked
project is installed using ``pip`` (since ``conda`` doesn't have a
method to install from an arbitrary ``setup.py``).
"""
tool_name = "conda"
_matches_cache = {}
def __init__(self, conf, python, requirements):
"""
Parameters
----------
conf : Config instance
python : str
Version of Python. Must be of the form "MAJOR.MINOR".
requirements : dict
Dictionary mapping a PyPI package name to a version
identifier string.
"""
self._python = python
self._requirements = requirements
self._conda_channels = conf.conda_channels
super(Conda, self).__init__(conf, python, requirements)
@classmethod
def matches(cls, python):
# Calling conda can take a long time, so remember the result
if python not in cls._matches_cache:
cls._matches_cache[python] = cls._matches(python)
return cls._matches_cache[python]
@classmethod
def _matches(cls, python):
if not re.match(r'^[0-9].*$', python):
# The python name should be a version number
return False
try:
conda = _find_conda()
except IOError:
return False
else:
# This directory never gets created, since we're just
# doing a dry run below. All it needs to be is something
# that doesn't already exist.
path = os.path.join(tempfile.gettempdir(), 'check')
# Check that the version number is valid
try:
util.check_call([
conda,
'create',
'--yes',
'-p',
path,
'python={0}'.format(python),
'--dry-run'], display_error=False, dots=False)
except util.ProcessError:
return False
else:
return True
def _setup(self):
try:
conda = _find_conda()
except IOError as e:
raise util.UserError(str(e))
log.info("Creating conda environment for {0}".format(self.name))
# create a temporary environment.yml file
# and use that to generate the env for benchmarking
env_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix=".yml")
try:
env_file.write('name: {0}\n'
'channels:\n'.format(self.name))
env_file.writelines((' - %s\n' % ch for ch in self._conda_channels))
env_file.write('dependencies:\n'
' - python={0}\n'
' - wheel\n'
' - pip\n'.format(self._python))
# categorize & write dependencies based on pip vs. conda
conda_args, pip_args = self._get_requirements(conda)
env_file.writelines((' - %s\n' % s for s in conda_args))
if pip_args:
# and now specify the packages that are to be installed in
# the pip subsection
env_file.write(' - pip:\n')
env_file.writelines((' - %s\n' % s for s in pip_args))
env_file.close()
util.check_output([conda] + ['env', 'create', '-f', env_file.name,
'-p', self._path, '--force'])
except Exception as exc:
if os.path.isfile(env_file.name):
with open(env_file.name, 'r') as f:
text = f.read()
log.info("conda env create failed: in {} with:\n{}".format(self._path, text))
raise
finally:
os.unlink(env_file.name)
def _get_requirements(self, conda):
if self._requirements:
# retrieve and return all conda / pip dependencies
conda_args = []
pip_args = []
for key, val in six.iteritems(self._requirements):
if key.startswith('pip+'):
if val:
pip_args.append("{0}=={1}".format(key[4:], val))
else:
pip_args.append(key[4:])
else:
if val:
conda_args.append("{0}={1}".format(key, val))
else:
conda_args.append(key)
return conda_args, pip_args
else:
return [], []
def run(self, args, **kwargs):
log.debug("Running '{0}' in {1}".format(' '.join(args), self.name))
return self.run_executable('python', args, **kwargs)
def run_executable(self, executable, args, **kwargs):
# Conda doesn't guarantee that user site directories are excluded
kwargs["env"] = dict(kwargs.pop("env", os.environ),
PYTHONNOUSERSITE=str("True"))
return super(Conda, self).run_executable(executable, args, **kwargs)
| [
[
[
112,
127
]
],
[
[
129,
137
]
],
[
[
139,
155
]
],
[
[
157,
171
]
],
[
[
180,
182
],
[
2443,
2445
]
],
[
[
190,
192
],
[
304,
306
],
[
1117,
1119
],
[
1145,
1147
],
[
2865,
2867
],
[
4879,
4881
],
[
5139,
5141
],
[
6263,
6265
]
],
[
[
200,
208
],
[
2878,
2886
],
[
3726,
3734
]
],
[
[
217,
220
],
[
5382,
5385
]
],
[
[
237,
248
],
[
1246,
1257
]
],
[
[
271,
274
],
[
3531,
3534
],
[
5014,
5017
],
[
5954,
5957
]
],
[
[
290,
294
],
[
1195,
1199
],
[
2997,
3001
],
[
3288,
3292
],
[
3499,
3503
],
[
4693,
4697
]
],
[
[
297,
300
]
],
[
[
327,
338
],
[
2591,
2602
],
[
3438,
3449
]
],
[
[
1240,
1245
],
[
2063,
2068
],
[
6356,
6361
]
]
] |
End of preview. Expand
in Dataset Viewer.
README.md exists but content is empty.
Use the Edit dataset card button to edit it.
- Downloads last month
- 31