repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
mtearle/npyscreenreactor | npyscreenreactor/__init__.py | 1 | 2663 | #!/usr/bin/env python
# npyscreenreactor.py
# Inspired by pausingreactor.py and xmmsreactor.py
# npyscreen modifications
# Copyright (c) 2015 Mark Tearle <mark@tearle.com>
# See LICENSE for details.
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
This module provides npyscreen event loop support for Twisted.
In order to use this support, simply do the following::
| import npyscreenreactor
| npyscreenreactor.install()
Then, when your root npyscreenApp has been created::
| from twisted.internet import reactor
| reactor.registerNpyscreenApp(yourApp)
| reactor.run()
Then use twisted.internet APIs as usual.
Stop the event loop using reactor.stop()
Maintainer: Mark Tearle
"""
from twisted.python import log, runtime
from twisted.internet import selectreactor
import npyscreen
class NpyscreenReactor(selectreactor.SelectReactor):
"""
npyscreen reactor.
npyscreen drives the event loop
"""
def doIteration(self, timeout):
# Executing what normal reactor would do...
self.runUntilCurrent()
selectreactor.SelectReactor.doIteration(self, timeout)
# push event back on the npyscreen queue
self.npyscreenapp.queue_event(npyscreen.Event("_NPYSCREEN_REACTOR"))
def registerNpyscreenApp(self, npyscreenapp):
"""
Register npyscreen.StandardApp instance with the reactor.
"""
self.npyscreenapp = npyscreenapp
# push an event on the npyscreen queue
self.npyscreenapp.add_event_hander("_NPYSCREEN_REACTOR", self._twisted_events)
def _twisted_events(self, event):
self.doIteration(0)
def _stopNpyscreen(self):
"""
Stop the npyscreen event loop if it hasn't already been stopped.
Called during Twisted event loop shutdown.
"""
if hasattr(self, "npyscreenapp"):
self.npyscreenapp.setNextForm(None)
def run(self, installSignalHandlers=True):
"""
Start the reactor.
"""
# Executing what normal reactor would do...
self.startRunning(installSignalHandlers=installSignalHandlers)
# do initial iteration and put event on queue to do twisted things
self.doIteration(0)
# add cleanup events:
self.addSystemEventTrigger("after", "shutdown", self._stopNpyscreen)
#
self.npyscreenapp.run()
def install():
"""
Configure the twisted mainloop to be run inside the npyscreen mainloop.
"""
reactor = NpyscreenReactor()
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
__all__ = ['install']
| mit | 1,564,531,213,550,637,600 | 25.63 | 86 | 0.685693 | false | 3.962798 | false | false | false |
google-research-datasets/tydiqa | baseline/debug.py | 1 | 6315 | # coding=utf-8
# Copyright 2020 The Google Research Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for logging debug info for use during model dev cycle."""
from absl import logging
import data
def is_int_list(value):
"""Checks if a value's type is a list of integers."""
return value and isinstance(value, list) and isinstance(value[0], int)
def is_unicode_list(value):
"""Checks if a value's type is a list of Unicode strings."""
if value and isinstance(value, list):
return isinstance(value[0], str)
return False
def is_valid_unicode(text):
"""Check if a string is valid unicode. Did we slice on an invalid boundary?"""
try:
text.decode("utf-8")
return True
except UnicodeDecodeError:
return False
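# Illustrative check (not part of the original file): a complete UTF-8 sequence
# such as b"\xe2\x98\x83" decodes fine and is reported as valid, while a slice
# cut mid-codepoint, e.g. b"\xe2\x98", raises UnicodeDecodeError and returns False.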
def log_debug_info(filename, line_no, entry, debug_info, reverse_vocab_table):
"""Logs `debug_info` for debugging purposes."""
# Enable when debugging experimental new things.
extremely_verbose = False
def sanitize_char(c):
"""Optionally normalize chars we don't want in log messages."""
# Don't like having too many newlines in your debugging log output?
# Change this.
remove_newlines = False
if c == "\r":
if remove_newlines:
return " "
return "\r"
if c == "\n":
if remove_newlines:
return " "
return "\n"
return c
def sanitize(s):
return "".join(sanitize_char(c) for c in s)
doc = entry["plaintext"]
if "json" in debug_info:
json_elem = debug_info["json"]
else:
json_elem = None
logging.info("No 'json' key in `debug_info`.")
if "tydi_example" in debug_info:
tydi_example = debug_info["tydi_example"]
else:
tydi_example = None
logging.info("No 'tydi_example' key in `debug_info`.")
offset_to_wp = None
doc_wp = None
logging.info("=== Logging example %s:%d ===", filename, line_no)
window = 20
for i in range(0, data.byte_len(entry["contexts"]), window):
span_text = data.byte_slice(
entry["contexts"], i, i + window, errors="replace")
doc_offsets = entry["context_to_plaintext_offset"][i:i + window]
# Now double-check that those doc offsets actually match the text we expect.
recovered_doc = [
data.byte_slice(doc, i, i + 1, errors="replace")
for i in doc_offsets
if i != -1
]
if extremely_verbose:
logging.info("context_to_doc: %d: %s (%s) %s", i,
sanitize(span_text), " ".join(str(x) for x in doc_offsets),
sanitize(recovered_doc))
for key, value in debug_info.items():
if key == "offset_to_wp":
offset_to_wp = value
continue
# Convert wordpiece vocab IDs back into readable text.
if is_int_list(value) and "wp_ids" in key:
value = [reverse_vocab_table[word_id] for word_id in value]
# Convert Unicode escapes to readable text.
if is_unicode_list(value):
value = [word.encode("utf-8") for word in value]
if key == "all_doc_wp_ids":
doc_wp = value
# Represent lists as plaintext.
if isinstance(value, list):
value = " ".join(str(item) for item in value)
value = str(value)
logging.info("%s: %s", key, value)
if offset_to_wp is not None:
for i in range(0, data.byte_len(entry["contexts"]), window):
wp_slice = []
for byte_offset in range(i, i + window):
if byte_offset in offset_to_wp:
wp_offset = offset_to_wp[byte_offset]
wp_slice.append(doc_wp[wp_offset])
else:
wp_slice.append("-1")
context_slice = data.byte_slice(
entry["contexts"], i, i + window, errors="replace")
logging.info("context_to_wp: %d: %s (%s)", i, sanitize(context_slice),
" ".join(str(x) for x in wp_slice))
if "searched_offset_to_wp" in debug_info:
logging.info("searched_offset_to_wp: %s",
" ".join(str(i) for i in debug_info["searched_offset_to_wp"]))
if json_elem:
logging.info(
"json.annotations[0].minimal_answer.plaintext_start_byte: %d",
json_elem["annotations"][0]["minimal_answer"]["plaintext_start_byte"])
logging.info(
"json.annotations[0].minimal_answer.plaintext_end_byte: %d",
json_elem["annotations"][0]["minimal_answer"]["plaintext_end_byte"])
min_ans_sp = json_elem["annotations"][0]["minimal_answer"]
min_ans_text = data.byte_slice(
json_elem["document_plaintext"],
min_ans_sp["plaintext_start_byte"],
min_ans_sp["plaintext_end_byte"],
errors="replace")
min_ans_text_in_context = data.byte_slice(
json_elem["document_plaintext"],
min_ans_sp["plaintext_start_byte"] - 100,
min_ans_sp["plaintext_end_byte"] + 100,
errors="replace")
logging.info("minimal answer text (from json): %s", min_ans_text)
logging.info("minimal answer text in context: %s", min_ans_text_in_context)
logging.info("entry.answer.span_start: %d", entry["answer"]["span_start"])
logging.info("entry.answer.span_end: %d", entry["answer"]["span_end"])
logging.info("entry.answer.span_text: %s", entry["answer"]["span_text"])
if tydi_example:
# Non-train examples may not have offsets.
if tydi_example.start_byte_offset:
logging.info("tydi_example.start_byte_offset: %d",
tydi_example.start_byte_offset)
logging.info("tydi_example.end_byte_offset: %d",
tydi_example.end_byte_offset)
tydi_example_min_ans_text = data.byte_slice(
entry["contexts"],
tydi_example.start_byte_offset,
tydi_example.end_byte_offset,
errors="replace")
logging.info(
"minimal answer text (from TyDiExample byte offsets in `contexts`): %s",
tydi_example_min_ans_text)
logging.info("^^^ End example ^^^")
| apache-2.0 | 6,415,408,893,063,222,000 | 34.477528 | 82 | 0.633888 | false | 3.523996 | false | false | false |
dpiekacz/cumulus-linux-ansible-modules | library/cl_quagga_ospf.py | 2 | 15655 | #!/usr/bin/env python
#
# Copyright (C) 2014, Cumulus Networks www.cumulusnetworks.com
#
#
DOCUMENTATION = '''
---
module: cl_quagga_ospf
author: Cumulus Networks
short_description: Configure basic OSPFv2 parameters and interfaces using Quagga
description:
- Configures basic OSPFv2 global parameters such as \
router id and bandwidth cost, or OSPFv2 interface configuration \
like point-to-point settings or enabling OSPFv2 on an interface. \
Configuration is applied to single OSPFv2 instance. \
Multiple OSPFv2 instance configuration is currently not supported. \
It requires Quagga version 0.99.22 and higher with the non-modal Quagga CLI \
developed by Cumulus Linux. For more details go to the Routing User Guide @ \
http://cumulusnetworks.com/docs/2.2/ and Quagga Docs @ http://www.nongnu.org/quagga/
options:
router_id:
description:
- Set the OSPFv2 router id
required: true
reference_bandwidth:
description:
- Set the OSPFv2 auto cost reference bandwidth
default: 40000
saveconfig:
description:
- Boolean. Issue write memory to save the config
choices: ['yes', 'no']
default: ['no']
interface:
description:
- define the name the interface to apply OSPFv2 services.
point2point:
description:
- Boolean. enable OSPFv2 point2point on the interface
choices: ['yes', 'no']
required_together:
- with interface option
area:
description:
- defines the area the interface is in
required_together:
- with interface option
cost:
description:
- define ospf cost.
required_together:
- with interface option
passive:
description:
- make OSPFv2 interface passive
choices: ['yes', 'no']
required_together:
- with interface option
state:
description:
- Describes if OSPFv2 should be present on a particular interface.\
Module currently does not check that interface is not associated \
with a bond or bridge. \
User will have to manually clear the configuration of the interface \
from the bond or bridge. \
This will be implemented in a later release
choices: [ 'present', 'absent']
default: 'present'
required_together:
- with interface option
requirements: ['Cumulus Linux Quagga non-modal CLI, Quagga version 0.99.22 and higher']
'''
EXAMPLES = '''
Example playbook entries using the cl_quagga_ospf module
tasks:
- name: configure ospf router_id
cl_quagga_ospf: router_id=10.1.1.1
- name: enable OSPFv2 on swp1 and set it be a point2point OSPF \
interface with a cost of 65535
cl_quagga_ospf: interface=swp1 point2point=yes cost=65535
- name: enable ospf on swp1-5
cl_quagga_ospf: interface={{ item }}
with_sequence: start=1 end=5 format=swp%d
- name: disable ospf on swp1
cl_quagga_ospf: interface=swp1 state=absent
'''
def run_cl_cmd(module, cmd, check_rc=True, split_lines=True):
try:
(rc, out, err) = module.run_command(cmd, check_rc=check_rc)
except Exception, e:
module.fail_json(msg=e.strerror)
# trim last line as it is always empty
if split_lines:
ret = out.splitlines()
else:
ret = out
return ret
def check_dsl_dependencies(module, input_options,
dependency, _depend_value):
for _param in input_options:
if module.params.get(_param):
if not module.params.get(dependency):
_param_output = module.params.get(_param)
_msg = "incorrect syntax. " + _param + " must have an interface option." + \
" Example 'cl_quagga_ospf: " + dependency + "=" + _depend_value + " " + \
_param + "=" + _param_output + "'"
module.fail_json(msg=_msg)
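# For example (hypothetical task): 'cl_quagga_ospf: cost=20' without an
# interface option fails here with a hint such as
# "incorrect syntax. cost must have an interface option.
#  Example 'cl_quagga_ospf: interface=swp1 cost=20'".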
def has_interface_config(module):
if module.params.get('interface') is not None:
return True
else:
return False
def get_running_config(module):
running_config = run_cl_cmd(module, '/usr/bin/vtysh -c "show run"')
got_global_config = False
got_interface_config = False
module.interface_config = {}
module.global_config = []
for line in running_config:
line = line.lower().strip()
# ignore the '!' lines or blank lines
if len(line.strip()) <= 1:
if got_global_config:
got_global_config = False
if got_interface_config:
got_interface_config = False
continue
# begin capturing global config
m0 = re.match('router\s+ospf', line)
if m0:
got_global_config = True
continue
m1 = re.match('^interface\s+(\w+)', line)
if m1:
module.ifacename = m1.group(1)
module.interface_config[module.ifacename] = []
got_interface_config = True
continue
if got_interface_config:
module.interface_config[module.ifacename].append(line)
continue
if got_global_config:
m3 = re.match('\s*passive-interface\s+(\w+)', line)
if m3:
ifaceconfig = module.interface_config.get(m3.group(1))
if ifaceconfig:
ifaceconfig.append('passive-interface')
else:
module.global_config.append(line)
continue
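# After parsing, module.global_config holds router-level lines such as
# 'ospf router-id 10.1.1.1', and module.interface_config maps interface names
# to their config lines, e.g. {'swp1': ['ip ospf area 0.0.0.0', 'passive-interface']}
# (values shown are illustrative, not read from a real device).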
def get_config_line(module, stmt, ifacename=None):
if ifacename:
pass
else:
for i in module.global_config:
if re.match(stmt, i):
return i
return None
def update_router_id(module):
router_id_stmt = 'ospf router-id '
actual_router_id_stmt = get_config_line(module, router_id_stmt)
router_id_stmt = 'ospf router-id ' + module.params.get('router_id')
if router_id_stmt != actual_router_id_stmt:
cmd_line = "/usr/bin/cl-ospf router-id set %s" %\
(module.params.get('router_id'))
run_cl_cmd(module, cmd_line)
module.exit_msg += 'router-id updated '
module.has_changed = True
def update_reference_bandwidth(module):
bandwidth_stmt = 'auto-cost reference-bandwidth'
actual_bandwidth_stmt = get_config_line(module, bandwidth_stmt)
bandwidth_stmt = bandwidth_stmt + ' ' + \
module.params.get('reference_bandwidth')
if bandwidth_stmt != actual_bandwidth_stmt:
cmd_line = "/usr/bin/cl-ospf auto-cost set reference-bandwidth %s" %\
(module.params.get('reference_bandwidth'))
run_cl_cmd(module, cmd_line)
module.exit_msg += 'reference bandwidth updated '
module.has_changed = True
def add_global_ospf_config(module):
module.has_changed = False
get_running_config(module)
if module.params.get('router_id'):
update_router_id(module)
if module.params.get('reference_bandwidth'):
update_reference_bandwidth(module)
if module.has_changed is False:
module.exit_msg = 'No change in OSPFv2 global config'
module.exit_json(msg=module.exit_msg, changed=module.has_changed)
def check_ip_addr_show(module):
cmd_line = "/sbin/ip addr show %s" % (module.params.get('interface'))
result = run_cl_cmd(module, cmd_line)
for _line in result:
m0 = re.match('\s+inet\s+\w+', _line)
if m0:
return True
return False
def get_interface_addr_config(module):
ifacename = module.params.get('interface')
cmd_line = "/sbin/ifquery --format json %s" % (ifacename)
int_config = run_cl_cmd(module, cmd_line, True, False)
ifquery_obj = json.loads(int_config)[0]
iface_has_address = False
if 'address' in ifquery_obj.get('config'):
for addr in ifquery_obj.get('config').get('address'):
try:
socket.inet_aton(addr.split('/')[0])
iface_has_address = True
break
except socket.error:
pass
else:
iface_has_address = check_ip_addr_show(module)
if iface_has_address is False:
_msg = "interface %s does not have an IP configured. " +\
"Required for OSPFv2 to work"
module.fail_json(msg=_msg)
# for test purposes only
return iface_has_address
def enable_or_disable_ospf_on_int(module):
ifacename = module.params.get('interface')
_state = module.params.get('state')
iface_config = module.interface_config.get(ifacename)
if iface_config is None:
_msg = "%s is not found in Quagga config. " % (ifacename) + \
"Check that %s is active in kernel" % (ifacename)
module.fail_json(msg=_msg)
return False # for test purposes
found_area = None
for i in iface_config:
m0 = re.search('ip\s+ospf\s+area\s+([0-9.]+)', i)
if m0:
found_area = m0.group(1)
break
if _state == 'absent':
for i in iface_config:
if found_area:
cmd_line = '/usr/bin/cl-ospf clear %s area' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += "OSPFv2 now disabled on %s " % (ifacename)
return False
area_id = module.params.get('area')
if found_area != area_id:
cmd_line = '/usr/bin/cl-ospf interface set %s area %s' % \
(ifacename, area_id)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += "OSPFv2 now enabled on %s area %s " % \
(ifacename, area_id)
return True
def update_point2point(module):
ifacename = module.params.get('interface')
point2point = module.params.get('point2point')
iface_config = module.interface_config.get(ifacename)
found_point2point = None
for i in iface_config:
m0 = re.search('ip\s+ospf\s+network\s+point-to-point', i)
if m0:
found_point2point = True
break
if point2point:
if not found_point2point:
cmd_line = '/usr/bin/cl-ospf interface set %s network point-to-point' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += 'OSPFv2 point2point set on %s ' % (ifacename)
else:
if found_point2point:
cmd_line = '/usr/bin/cl-ospf interface clear %s network' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += 'OSPFv2 point2point removed on %s ' % \
(ifacename)
def update_passive(module):
ifacename = module.params.get('interface')
passive = module.params.get('passive')
iface_config = module.interface_config.get(ifacename)
found_passive = None
for i in iface_config:
m0 = re.search('passive-interface', i)
if m0:
found_passive = True
break
if passive:
if not found_passive:
cmd_line = '/usr/bin/cl-ospf interface set %s passive' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += '%s is now OSPFv2 passive ' % (ifacename)
else:
if found_passive:
cmd_line = '/usr/bin/cl-ospf interface clear %s passive' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += '%s is no longer OSPFv2 passive ' % \
(ifacename)
def update_cost(module):
ifacename = module.params.get('interface')
cost = module.params.get('cost')
iface_config = module.interface_config.get(ifacename)
found_cost = None
for i in iface_config:
m0 = re.search('ip\s+ospf\s+cost\s+(\d+)', i)
if m0:
found_cost = m0.group(1)
break
if cost != found_cost and cost is not None:
cmd_line = '/usr/bin/cl-ospf interface set %s cost %s' % \
(ifacename, cost)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += 'OSPFv2 cost on %s changed to %s ' % \
(ifacename, cost)
elif cost is None and found_cost is not None:
cmd_line = '/usr/bin/cl-ospf interface clear %s cost' % \
(ifacename)
run_cl_cmd(module, cmd_line)
module.has_changed = True
module.exit_msg += 'OSPFv2 cost on %s changed to default ' % \
(ifacename)
def config_ospf_interface_config(module):
enable_int_defaults(module)
module.has_changed = False
# get all ospf related config from quagga both globally and iface based
get_running_config(module)
# if interface does not have ipv4 address module should fail
get_interface_addr_config(module)
# if ospf should be enabled, continue to check for the remaining attrs
if enable_or_disable_ospf_on_int(module):
# update ospf point-to-point setting if needed
update_point2point(module)
# update ospf interface cost if needed
update_cost(module)
# update ospf interface passive setting
update_passive(module)
def saveconfig(module):
if module.params.get('saveconfig') is True and\
module.has_changed:
run_cl_cmd(module, '/usr/bin/vtysh -c "wr mem"')
module.exit_msg += 'Saving Config '
def enable_int_defaults(module):
if not module.params.get('area'):
module.params['area'] = '0.0.0.0'
if not module.params.get('state'):
module.params['state'] = 'present'
def check_if_ospf_is_running(module):
if not os.path.exists('/var/run/quagga/ospfd.pid'):
_msg = 'OSPFv2 process is not running. Unable to execute command'
module.fail_json(msg=_msg)
def main():
module = AnsibleModule(
argument_spec=dict(
reference_bandwidth=dict(type='str',
default='40000'),
router_id=dict(type='str'),
interface=dict(type='str'),
cost=dict(type='str'),
area=dict(type='str'),
state=dict(type='str',
choices=['present', 'absent']),
point2point=dict(type='bool', choices=BOOLEANS),
saveconfig=dict(type='bool', choices=BOOLEANS, default=False),
passive=dict(type='bool', choices=BOOLEANS)
),
mutually_exclusive=[['reference_bandwidth', 'interface'],
['router_id', 'interface']]
)
check_if_ospf_is_running(module)
check_dsl_dependencies(module, ['cost', 'state', 'area',
'point2point', 'passive'],
'interface', 'swp1')
module.has_changed = False
module.exit_msg = ''
if has_interface_config(module):
config_ospf_interface_config(module)
else:
# Set area to none before applying global config
module.params['area'] = None
add_global_ospf_config(module)
saveconfig(module)
if module.has_changed:
module.exit_json(msg=module.exit_msg, changed=module.has_changed)
else:
module.exit_json(msg='no change', changed=False)
# import module snippets
from ansible.module_utils.basic import *
import re
import os
import socket
# incompatible with ansible 1.4.4 - ubuntu 12.04 version
# from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 | 101,102,559,503,141,020 | 34.3386 | 93 | 0.595976 | false | 3.716762 | true | false | false |
paulbodean88/automation-design-patterns | src/state/state.py | 1 | 1939 | """
This pattern provides different behaviours based on the internal state of an object.
An implementation example based on the test execution life cycle is provided below.
"""
import abc
from src.utils import get_selenium_driver
class Manager:
"""
State machine manager.
Acting as an interface to the client and providing the actual state of the object
"""
def __init__(self, state):
"""
:param state: current object state
"""
self._state = state
def get_state(self):
"""
Delegate to the current state's run() behaviour.
"""
self._state.run()
class State(metaclass=abc.ABCMeta):
"""
Interface definition for behaviour encapsulation
"""
def __init__(self):
self._driver = get_selenium_driver('chrome')
def get_driver(self):
return self._driver
@abc.abstractmethod
def run(self):
pass
class StartTest(State):
"""
Prepare the test execution environment
"""
def run(self):
print(" Start test state!!! ")
self.get_driver().get('https://en.wikipedia.org/')
class ExecuteTest(State):
"""
Run different test steps
"""
SEARCH_BUTTON = 'searchButton'
def run(self):
print(" Execute test steps state!!! ")
if self.get_driver().find_element_by_id(ExecuteTest.SEARCH_BUTTON).is_displayed():
print("Search button available")
self._driver.find_element_by_id(ExecuteTest.SEARCH_BUTTON).click()
else:
print("Search button not available")
class StopTest(State):
"""
Close the testing session
"""
def run(self):
print(" Stop test state!!! ")
self.get_driver().quit()
if __name__ == '__main__':
start = StartTest()
execute = ExecuteTest()
stop = StopTest()
for test_state in [start, execute, stop]:
manager = Manager(test_state)
manager.get_state()
| mit | -7,762,629,137,601,126,000 | 20.786517 | 90 | 0.601341 | false | 4.280353 | true | false | false |
Rassilion/ProjectC | web/app/views.py | 1 | 6083 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import time
from sqlalchemy import desc
from forms import *
from flask.ext.security import roles_accepted, roles_required, login_required, Security, utils, current_user
from . import app, db, r
from .utils.table import Table
from flask import request, g, render_template, redirect, url_for, session, send_from_directory, flash
from models import *
from admin import init_admin
# initialize flask-security
security = Security(app, user_datastore, register_form=ExtendedRegisterForm)
# page render time
@app.before_request
def before_request():
g.request_start_time = time.time()
g.request_time = lambda: "%.5fs" % (time.time() - g.request_start_time)
# search engine things
@app.route('/robots.txt')
@app.route('/sitemap.xml')
def static_from_root():
return send_from_directory(app.static_folder, request.path[1:])
@app.route('/')
@app.route('/index')
def index():
news = News.query.all()
return render_template('index.html', title='Anasayfa', news=news)
@app.route('/about')
def about():
return render_template('about.html', title=u'Hakkında')
@app.route('/news/<slug>')
def news(slug):
post = News.query.filter_by(slug=slug).first_or_404()
return render_template('news.html', title=post.title, post=post)
@app.route('/problems/')
@app.route('/problems/<int:page>')
def problem_list(page=1):
problems = sort(Problem, Problem.query, problem_sort_list).paginate(
page=page, per_page=app.config["PRODUCTS_PER_PAGE"],
)
problems_table = Table(problem_sort_list, problem_column_list, problems)
return render_template('problem_list.html', title='Problem Listesi', problems_table=problems_table)
@app.route('/problem/<slug>', methods=['GET', 'POST'])
def problem(slug):
problem = Problem.query.filter_by(slug=slug).first_or_404()
form = SubmissionForm()
if form.validate_on_submit():
try:
newS = Submission(problem_id=problem.id, user_id=current_user.id, code=form.code.data)
db.session.add(newS)
db.session.commit()
# publish to redis
r.publish('submissions', str(newS.id))
flash(u'Tebrikler kodunuz eklendi, kodlarım sayfasından görebilirsiniz', 'success')
except:
db.session.rollback()
flash(u'Bir hata oluştu lütfen daha sonra deneyin', 'error')
return render_template('problem.html', title=problem.title, problem=problem, form=form)
@app.route('/problem/<slug>/solution')
@login_required
def problem_solution(slug):
problem = Problem.query.filter_by(slug=slug).first_or_404()
return render_template('problem_solution.html', title=problem.title, problem=problem)
@app.route('/problem/<slug>/suggestion')
@login_required
def problem_suggestion(slug):
problem = Problem.query.filter_by(slug=slug).first_or_404()
return render_template('problem.html', title=problem.title, problem=problem)
@app.route('/author/profile/<username>/')
@app.route('/author/profile/<username>/<int:page>')
def author_profile(username, page=1):
author = User.query.filter_by(username=username).first_or_404()
problems = sort(Problem, author.problems, problem_sort_list).paginate(
page=page, per_page=app.config["PRODUCTS_PER_PAGE"],
)
problems_table = Table(problem_sort_list, problem_column_list, problems)
return render_template('author_profile.html', title=author.username, author=author, problems_table=problems_table)
@app.route('/tag/<name>/')
@app.route('/tag/<name>/<int:page>')
def tag(name, page=1):
tag = Tag.query.filter_by(name=name).first_or_404()
problems = sort(Problem, tag.problems, problem_sort_list).paginate(
page=page, per_page=app.config["PRODUCTS_PER_PAGE"],
)
problems_table = Table(problem_sort_list, problem_column_list, problems)
return render_template('tag.html', title=tag.name, tag=tag, problems_table=problems_table)
@app.route('/user/<username>')
def user_profile(username):
user = User.query.filter_by(username=username).first_or_404()
# order submissions by timestamp
submissions = user.submissions.order_by(Submission.timestamp.desc())
return render_template('user_profile.html', title=user.username, user=user, submissions=submissions)
@login_required
@app.route('/submission/<int:id>')
def user_submission(id):
submission = Submission.query.filter_by(id=id).first_or_404()
return render_template('user_submission.html', title=u"Submision", submission=submission)
@app.route('/author/panel/add', methods=['GET', 'POST'])
@login_required
@roles_accepted('author', 'admin')
def author_panel_add():
form = ProblemForm()
if form.validate_on_submit():
try:
newp = Problem(title=form.title.data, body=form.body.data, solution=form.solution.data)
newp.tags = form.tags.data
db.session.add(newp)
current_user.problems.append(newp)
db.session.commit()
flash(u'Tebrikler Probleminiz eklendi, Problemler sayfasından görebilirsiniz', 'success')
except:
db.session.rollback()
flash(u'Bir hata oluştu lütfen daha sonra deneyin', 'error')
return render_template('author_panel_add.html', title=u'Yeni soru ekle', form=form)
problem_sort_list = {'id', 'title', 'count', 'difficulty'}
problem_column_list = [('id', u'id'), ('title', u'başlık'), ('tags', u'İlgili konular'), ('count', u'Çözüm sayısı'), (
'difficulty', u'Zorluk')]
def sort(model, query, sort_list):
"""
sort query with url args
:param model:
db model name
:param query:
sql alchemy query
:param sort_list:
allowed sort url args
:return:
sorted query if fails return query
"""
sort = request.args.get('sort', 'id')
sort_desc = request.args.get('desc', 0, type=int)
if sort not in sort_list:
return query
if sort_desc == 1:
return query.order_by(desc(getattr(model, sort)))
else:
return query.order_by(getattr(model, sort))
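# Example (illustrative request): /problems/?sort=difficulty&desc=1 orders the
# problem list by the Problem.difficulty column in descending order; an unknown
# sort argument simply returns the query unmodified.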
init_admin()
| gpl-3.0 | -1,107,422,444,037,347,500 | 33.460227 | 118 | 0.679472 | false | 3.299782 | false | false | false |
jonasfoe/COPASI | copasi/bindings/python/unittests/Test_CFunctionParameter.py | 1 | 2533 | # -*- coding: utf-8 -*-
# Copyright (C) 2017 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and University of
# of Connecticut School of Medicine.
# All rights reserved.
# Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and The University
# of Manchester.
# All rights reserved.
# Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
# Copyright (C) 2006 - 2007 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc. and EML Research, gGmbH.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CFunctionParameter(unittest.TestCase):
def setUp(self):
self.functions=COPASI.CRootContainer.getFunctionList()
self.function=self.functions.findFunction("Iso Uni Uni")
self.assert_(self.function!=None)
self.assert_(self.function.__class__==COPASI.CFunction)
self.parameters=self.function.getVariables()
self.assert_(self.parameters!=None)
self.assert_(self.parameters.__class__==COPASI.CFunctionParameters)
index=self.parameters.findParameterByName("Keq",COPASI.CFunctionParameter.FLOAT64)
self.parameter=self.parameters.getParameter(index)
self.assert_(self.parameter!=None)
self.assert_(self.parameter.__class__==COPASI.CFunctionParameter)
def test_getKey(self):
key=self.parameter.getKey()
self.assert_(type(key)==StringType)
def test_getType(self):
b=self.parameter.getType()
self.assert_(type(b)==IntType)
self.assert_(b==COPASI.CFunctionParameter.FLOAT64)
def test_setType(self):
t=COPASI.CFunctionParameter.INT32
self.parameter.setType(t)
self.assert_(self.parameter.getType()==t)
def test_getUsage(self):
b=self.parameter.getUsage()
self.assert_(type(b)==IntType)
self.assert_(b==COPASI.CFunctionParameter.PARAMETER)
def test_setUsage(self):
t=COPASI.CFunctionParameter.VOLUME
self.parameter.setUsage(t)
self.assert_(self.parameter.getUsage()==t)
def suite():
tests=[
"test_getKey"
,"test_getType"
,"test_setType"
,"test_getUsage"
,"test_setUsage"
]
return unittest.TestSuite(map(Test_CFunctionParameter,tests))
if(__name__ == '__main__'):
unittest.TextTestRunner(verbosity=2).run(suite())
| artistic-2.0 | -7,394,096,215,339,876,000 | 30.6625 | 86 | 0.699566 | false | 3.306789 | true | false | false |
Harry-R/skylines | skylines/model/search.py | 3 | 6729 | import sys
import shlex
from sqlalchemy import literal_column, cast, desc, Unicode
from sqlalchemy.dialects.postgresql import array
from skylines.database import db
from skylines.lib.types import is_unicode
PATTERNS = [
(u'{}', 5), # Matches token exactly
(u'{}%', 3), # Begins with token
(u'% {}%', 2), # Has token at word start
(u'%{}%', 1), # Has token
]
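# Example (illustrative): for the token u"jon" the generated ILIKE patterns are
# u"jon" (weight 5), u"jon%" (3), u"% jon%" (2) and u"%jon%" (1); in
# weight_expression below each weight is further multiplied by the token length.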
def search_query(cls, tokens,
weight_func=None, include_misses=False, ordered=True):
# Read the searchable columns from the table (strings)
columns = cls.__searchable_columns__
# Convert the columns from strings into column objects
columns = [getattr(cls, c) for c in columns]
# The model name that can be used to match search result to model
cls_name = literal_column('\'{}\''.format(cls.__name__))
# Filter out id: tokens for later
ids, tokens = process_id_option(tokens)
# If there are still tokens left after id: token filtering
if tokens:
# Generate the search weight expression from the
# searchable columns, tokens and patterns
if not weight_func:
weight_func = weight_expression
weight = weight_func(columns, tokens)
# If the search expression only included "special" tokens like id:
else:
weight = literal_column(str(1))
# Create an array of stringified detail columns
details = getattr(cls, '__search_detail_columns__', None)
if details:
details = [cast(getattr(cls, d), Unicode) for d in details]
else:
details = [literal_column('NULL')]
# Create a query object
query = db.session.query(
cls_name.label('model'), cls.id.label('id'),
cls.name.label('name'), array(details).label('details'),
weight.label('weight'))
# Filter out specific ids (optional)
if ids:
query = query.filter(cls.id.in_(ids))
# Filter out results that don't match the patterns at all (optional)
if not include_misses:
query = query.filter(weight > 0)
# Order by weight (optional)
if ordered:
query = query.order_by(desc(weight))
return query
db.Model.search_query = classmethod(search_query)
def combined_search_query(models, tokens, include_misses=False, ordered=True):
models, tokens = process_type_option(models, tokens)
# Build sub search queries
queries = [model.search_query(
tokens, include_misses=include_misses, ordered=False)
for model in models]
# Build combined search query
query = queries[0]
if len(queries) > 1:
query = query.union(*queries[1:])
# Order by weight (optional)
if ordered:
query = query.order_by(desc('weight'))
return query
def process_type_option(models, tokens):
"""
This function looks for "type:<type>" in the tokens and filters the
searchable models for the requested types.
Returns the filtered list of models.
"""
# Filter for type: and types: tokens
types, new_tokens = __filter_prefixed_tokens('type', tokens)
# Filter the list of models according to the type filter
new_models = [model for model in models if model.__name__.lower() in types]
# Return original models list if there are no matching models
if len(new_models) == 0:
return models, new_tokens
# Return filtered models and tokens
return new_models, new_tokens
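# Example (illustrative; model names are assumptions): with models
# [User, Club, Airport] and tokens [u"type:club", u"foo"], only the Club model
# is kept and the remaining tokens are [u"foo"].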
def process_id_option(tokens):
"""
This function looks for "id:<id>" in the tokens, removes them from the
token list and returns a list of ids.
"""
# Filter for id: and ids: tokens
ids, new_tokens = __filter_prefixed_tokens('id', tokens)
# Convert ids to integers
def int_or_none(value):
try:
return int(value)
except ValueError:
return None
ids = [int_or_none(id) for id in ids]
ids = [id for id in ids if id is not None]
# Return ids and tokens
return ids, new_tokens
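# Example (illustrative): tokens [u"id:42", u"glider"] yield ids [42] and
# remaining tokens [u"glider"]; non-numeric ids such as u"id:abc" are dropped.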
def __filter_prefixed_tokens(prefix, tokens):
len_prefix = len(prefix)
# The original tokens without the prefixed tokens
new_tokens = []
# The contents that were found after the prefixed tokens
contents = []
# Iterate through original tokens to find prefixed tokens
for token in tokens:
_token = token.lower()
if _token.startswith(prefix + ':'):
contents.append(_token[(len_prefix + 1):])
elif _token.startswith(prefix + 's:'):
contents.extend(_token[(len_prefix + 2):].split(','))
else:
new_tokens.append(token)
# Strip whitespace from the types
contents = map(str.strip, contents)
return contents, new_tokens
def text_to_tokens(search_text):
assert is_unicode(search_text)
try:
if sys.version_info[0] == 2:
return [str.decode('utf8') for str in shlex.split(search_text.encode('utf8'))]
else:
return shlex.split(search_text)
except ValueError:
return search_text.split(' ')
def escape_tokens(tokens):
# Escape % and _ properly
tokens = [t.replace(u'%', u'\\%').replace(u'_', u'\\_') for t in tokens]
# Use * as wildcard character
tokens = [t.replace(u'*', u'%') for t in tokens]
return tokens
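# Example (illustrative): escape_tokens([u"100%", u"foo_bar", u"ba*"]) returns
# [u"100\\%", u"foo\\_bar", u"ba%"], so literal % and _ are escaped for ILIKE
# and * acts as the user-facing wildcard.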
def weight_expression(columns, tokens):
expressions = []
# Use entire search string as additional token
if len(tokens) > 1:
tokens = tokens + [u' '.join(tokens)]
for column in columns:
for token in tokens:
len_token = len(token)
for pattern, weight in PATTERNS:
# Inject the token in the search pattern
token_pattern = pattern.format(token)
# Adjust the weight for the length of the token
# (the long the matched token, the greater the weight)
weight *= len_token
# Create the weighted ILIKE expression
expression = column.weighted_ilike(token_pattern, weight)
# Add the expression to list
expressions.append(expression)
return sum(expressions)
def process_results_details(models, results):
return [process_result_details(models, result._asdict()) for result in results]
def process_result_details(models, result):
models = {m.__name__: m for m in models}
model = models.get(result['model'], None)
if not model:
return result
details = getattr(model, '__search_detail_columns__', [None])
if len(details) != len(result['details']):
return result
for key, value in zip(details, result['details']):
if isinstance(key, str):
result[key] = value
return result
| agpl-3.0 | -5,880,510,911,118,901,000 | 27.0375 | 90 | 0.625799 | false | 4.058504 | false | false | false |
slackhq/python-slackclient | slack_sdk/scim/v1/response.py | 1 | 7575 | import json
from typing import Dict, Any, List, Optional
from slack_sdk.scim.v1.group import Group
from slack_sdk.scim.v1.internal_utils import _to_snake_cased
from slack_sdk.scim.v1.user import User
class Errors:
code: int
description: str
def __init__(self, code: int, description: str) -> None:
self.code = code
self.description = description
def to_dict(self) -> dict:
return {"code": self.code, "description": self.description}
class SCIMResponse:
url: str
status_code: int
headers: Dict[str, Any]
raw_body: str
body: Dict[str, Any]
snake_cased_body: Dict[str, Any]
errors: Optional[Errors]
@property
def snake_cased_body(self) -> Dict[str, Any]:
if self._snake_cased_body is None:
self._snake_cased_body = _to_snake_cased(self.body)
return self._snake_cased_body
@property
def errors(self) -> Optional[Errors]:
errors = self.snake_cased_body.get("errors")
if errors is None:
return None
return Errors(**errors)
def __init__(
self,
*,
url: str,
status_code: int,
raw_body: str,
headers: dict,
):
self.url = url
self.status_code = status_code
self.headers = headers
self.raw_body = raw_body
self.body = (
json.loads(raw_body)
if raw_body is not None and raw_body.startswith("{")
else None
)
self._snake_cased_body = None
def __repr__(self):
dict_value = {}
for key, value in vars(self).items():
dict_value[key] = value.to_dict() if hasattr(value, "to_dict") else value
if dict_value: # skipcq: PYL-R1705
return f"<slack_sdk.scim.v1.{self.__class__.__name__}: {dict_value}>"
else:
return self.__str__()
# ---------------------------------
# Users
# ---------------------------------
class SearchUsersResponse(SCIMResponse):
users: List[User]
@property
def users(self) -> List[User]:
return [User(**r) for r in self.snake_cased_body.get("resources")]
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class ReadUserResponse(SCIMResponse):
user: User
@property
def user(self) -> User:
return User(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class UserCreateResponse(SCIMResponse):
user: User
@property
def user(self) -> User:
return User(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class UserPatchResponse(SCIMResponse):
user: User
@property
def user(self) -> User:
return User(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class UserUpdateResponse(SCIMResponse):
user: User
@property
def user(self) -> User:
return User(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class UserDeleteResponse(SCIMResponse):
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
# ---------------------------------
# Groups
# ---------------------------------
class SearchGroupsResponse(SCIMResponse):
groups: List[Group]
@property
def groups(self) -> List[Group]:
return [Group(**r) for r in self.snake_cased_body.get("resources")]
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class ReadGroupResponse(SCIMResponse):
group: Group
@property
def group(self) -> Group:
return Group(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class GroupCreateResponse(SCIMResponse):
group: Group
@property
def group(self) -> Group:
return Group(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class GroupPatchResponse(SCIMResponse):
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class GroupUpdateResponse(SCIMResponse):
group: Group
@property
def group(self) -> Group:
return Group(**self.snake_cased_body)
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
class GroupDeleteResponse(SCIMResponse):
def __init__(self, underlying: SCIMResponse):
self.underlying = underlying
self.url = underlying.url
self.status_code = underlying.status_code
self.headers = underlying.headers
self.raw_body = underlying.raw_body
self.body = underlying.body
self._snake_cased_body = None
| mit | 5,657,046,205,296,565,000 | 27.370787 | 85 | 0.614521 | false | 3.993147 | false | false | false |
eti-p-doray/gis-upir | spat/trajectory/load.py | 1 | 2871 | import datetime, logging
import csv
import pyproj
def load_csv(data):
rows = iter(data)
header = next(rows)
latitude_idx = next(i for i,v in enumerate(header) if v == "latitude")
longitude_idx = next(i for i,v in enumerate(header) if v == "longitude")
speed_idx = next(i for i,v in enumerate(header) if v == "speed")
altitude_idx = next(i for i,v in enumerate(header) if v == "altitude")
time_idx = next(i for i,v in enumerate(header) if v == "recorded_at")
hort_acc_idx = next(i for i,v in enumerate(header) if v == "hort_accuracy")
vert_acc_idx = next(i for i,v in enumerate(header) if v == "vert_accuracy")
src_node_idx = next(i for i,v in enumerate(header) if v == "src")
dst_node_idx = next(i for i,v in enumerate(header) if v == "dst")
src_proj = pyproj.Proj(init='epsg:4326')
dst_proj = pyproj.Proj(init='epsg:2950')
observations = []
accuracy = []
link = []
previous_id = -1
previous_time = None
for row in data:
current_id = row[0]
if current_id != previous_id:
if observations:
logging.info("loading %s", previous_id)
yield {
'observations': observations,
'accuracy': accuracy,
'id': previous_id,
'link': link
}
observations = []
accuracy = []
link = []
previous_id = current_id
previous_time = None
#if current_id != "9160":
# continue
current_time = datetime.datetime.strptime(row[time_idx], '%Y-%m-%d %H:%M:%S')
while (previous_time is not None and
previous_time + datetime.timedelta(seconds=1) < current_time):
observations.append(None)
accuracy.append(None)
link.append(None)
previous_time += datetime.timedelta(seconds=1)
previous_time = current_time
try:
coord = pyproj.transform(src_proj, dst_proj,
float(row[longitude_idx]),
float(row[latitude_idx]))
except RuntimeError:
previous_id = -1
continue
obs = [coord[0], coord[1], float(row[speed_idx])]
quantile = 1.96
acc = [float(row[hort_acc_idx])/quantile, float(row[vert_acc_idx])/quantile]
observations.append(obs)
accuracy.append(acc)
link.append((int(row[src_node_idx]), int(row[dst_node_idx])))
def load_all(files, max_count):
for filepath in files:
with open(filepath) as csvfile:
data = csv.reader(csvfile)
for trajectory in load_csv(data):
yield trajectory
max_count -= 1
if max_count == 0:
return | mit | 5,230,722,202,053,931,000 | 35.35443 | 85 | 0.536747 | false | 3.864065 | false | false | false |
pthcode/libpth | libpth/utils.py | 1 | 2180 | import os
import time
import tempfile
import functools
from . import metafile
def rate_limit(interval):
"""
Rate limiting decorator which allows the wrapped function to be
called at most once per `interval`.
"""
def decorator(fn):
last_called = [0.0] # This is a list because primitives are constant within the closure.
@functools.wraps(fn)
def wrapper(*args, **kwargs):
elapsed = time.time() - last_called[0]
remaining = interval - elapsed
if remaining > 0:
time.sleep(remaining)
last_called[0] = time.time()
return fn(*args, **kwargs)
return wrapper
return decorator
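# Illustrative usage sketch (names below are hypothetical, not part of libpth):
#
#   @rate_limit(2)
#   def fetch_release(url):
#       ...  # called at most once every 2 seconds; extra calls sleep first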
def locate(root, match_function, ignore_dotfiles=True):
'''
Yields all filenames within `root` for which match_function returns
True.
'''
for path, dirs, files in os.walk(root):
for filename in (os.path.abspath(os.path.join(path, filename))
for filename in files if match_function(filename)):
if ignore_dotfiles and os.path.basename(filename).startswith('.'):
pass
else:
yield filename
def ext_matcher(*extensions):
'''
Returns a function which checks if a filename has one of the specified
extensions.
'''
return lambda f: os.path.splitext(f)[-1].lower() in set(extensions)
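# Illustrative usage sketch (paths are hypothetical): combining the helpers,
# locate('/music', ext_matcher('.flac', '.mp3')) yields the absolute paths of
# all FLAC/MP3 files under /music, skipping dotfiles by default.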
def _add_source(meta):
meta['info']['source'] = 'PTH'
def make_torrent(path, passkey, output_dir=None):
'''
Creates a torrent suitable for uploading to PTH.
- `path`: The directory or file to upload.
- `passkey`: Your tracker passkey.
- `output_dir`: The directory where the torrent will be created. If unspecified, {} will be used.
'''.format(tempfile.tempdir)
if output_dir is None:
output_dir = tempfile.tempdir
torrent_path = tempfile.mktemp(dir=output_dir, suffix='.torrent')
torrent = metafile.Metafile(torrent_path)
announce_url = 'https://please.passtheheadphones.me/{}/announce'.format(passkey)
torrent.create(path, [announce_url], private=True, callback=_add_source)
return torrent_path
| gpl-3.0 | 3,381,352,357,227,602,400 | 28.863014 | 101 | 0.633028 | false | 4.082397 | false | false | false |
sergeLabo/pygame_server_gui | server_gui.py | 1 | 6213 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# server_gui.py
#############################################################################
# Copyright (C) Labomedia February 2015
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#############################################################################
import pygame
import sys
pygame.init()
clock = pygame.time.Clock()
BLACK = 0, 0, 0
WHITE = 255, 255, 255
CIEL = 0, 200, 255
RED = 255, 0, 0
ORANGE = 255, 100, 0
GREEN = 0, 255, 0
class Button:
'''Adds a button with a text label to an image.
Tip: pad the button texts with spaces so all buttons share the same width.
dx, dy: offset of the button relative to the center.
action: callback run on click.
Button text is black.
'''
def __init__(self, fond, text, color, font, dx, dy):
self.fond = fond
self.text = text
self.color = color
self.font = font
self.dec = dx, dy
self.state = False # enable or not
self.title = self.font.render(self.text, True, BLACK)
textpos = self.title.get_rect()
textpos.centerx = self.fond.get_rect().centerx + self.dec[0]
textpos.centery = self.dec[1]
self.textpos = [textpos[0], textpos[1], textpos[2], textpos[3]]
self.rect = pygame.draw.rect(self.fond, self.color, self.textpos)
self.fond.blit(self.title, self.textpos)
def update_button(self, fond, action=None):
self.fond = fond
mouse_xy = pygame.mouse.get_pos()
over = self.rect.collidepoint(mouse_xy)
if over:
action()
if self.color == RED:
self.color = GREEN
self.state = True
elif self.color == GREEN:
# except the + and - buttons, which always stay green
if len(self.text) > 5: # 5 chars including the padding spaces
self.color = RED
self.state = False
# redraw the button in the right color
self.rect = pygame.draw.rect(self.fond, self.color, self.textpos)
self.fond.blit(self.title, self.textpos)
def display_button(self, fond):
self.fond = fond
self.rect = pygame.draw.rect(self.fond, self.color, self.textpos)
self.fond.blit(self.title, self.textpos)
class Game:
def __init__(self):
self.screen = pygame.display.set_mode((640, 480))
self.level = 1
self.loop = True
# Font definitions
self.big = pygame.font.SysFont('freesans', 48)
self.small = pygame.font.SysFont('freesans', 36)
self.create_fond()
self.create_button()
def update_textes(self):
self.textes = [ ["Buggy Server", ORANGE, self.big, 0, 50],
["Level", BLACK, self.small, 0, 150],
[str(self.level), BLACK, self.small, 0, 200]]
def create_fond(self):
# Surface the size of the window
self.fond = pygame.Surface(self.screen.get_size())
# Fill it with sky blue
self.fond.fill(CIEL)
def create_button(self):
self.reset_button = Button(self.fond, " Reset ", RED, self.small, 0, 300)
self.start_button = Button(self.fond, " Start ", RED, self.small, 0, 360)
self.quit_button = Button(self.fond, " Quit ", RED, self.small, 0, 420)
self.moins_button = Button(self.fond, " - ", GREEN, self.small, -100, 200)
self.plus_button = Button(self.fond, " + ", GREEN, self.small, 100, 200)
def display_text(self, text, color, font, dx, dy):
'''Draws a text on the background. dx, dy offset relative to the center.
'''
mytext = font.render(text, True, color) # True pour antialiasing
textpos = mytext.get_rect()
textpos.centerx = self.fond.get_rect().centerx + dx
textpos.centery = dy
self.fond.blit(mytext, textpos)
def plus(self):
self.level += 1
if self.level == 6: self.level = 5
def moins(self):
self.level += -1
if self.level == 0: self.level = 1
def infinite_loop(self):
while self.loop:
self.create_fond()
# Boutons
self.reset_button.display_button(self.fond)
self.start_button.display_button(self.fond)
self.quit_button.display_button(self.fond)
self.moins_button.display_button(self.fond)
self.plus_button.display_button(self.fond)
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
self.reset_button.update_button(self.fond, action=reset)
self.start_button.update_button(self.fond, action=start)
self.quit_button.update_button(self.fond, action=gamequit)
self.moins_button.update_button(self.fond, action=self.moins)
self.plus_button.update_button(self.fond, action=self.plus)
self.update_textes()
for text in self.textes:
self.display_text(text[0], text[1], text[2],
text[3], text[4])
# Blit the background onto the window
self.screen.blit(self.fond, (0, 0))
# Refresh the display
pygame.display.update()
# 10 fps
clock.tick(10)
def reset():
print("reset")
def start():
print("start")
def gamequit():
print("Quit")
pygame.quit()
sys.exit()
if __name__ == '__main__':
game = Game()
game.infinite_loop()
| gpl-2.0 | -2,229,084,934,627,521,800 | 33.670391 | 85 | 0.573477 | false | 3.440133 | false | false | false |
MEhlinger/rpi_pushbutton_games | fallingSkies/fallingSkies.py | 1 | 4396 | # Main class
# Coded in Python 2.7.10 with PyGame
# by Brett Burley-Inners
# Update :: 11/19/2015
import pygame, time, random, sys
import player, skyChunk
def main():
# Initial setup
pygame.init()
font = pygame.font.SysFont("monospace", 15)
pygame.key.set_repeat(1, 5)
clock = pygame.time.Clock() # clock object for fps/ticks
display_width = 320 # default width (pixels)
display_height = 240 # default height (pixels)
gameScreen = pygame.display.set_mode((display_width, display_height))
pygame.display.set_caption("The Sky is Falling")
# Colors
white = (255, 255, 255)
darkGray = (50, 50, 50)
darkerGray = (25, 25, 25)
lightGray = (150, 150, 150)
rLightGray = (200, 200, 200)
rrLightGray = (220, 220, 220)
black = (0, 0, 0)
darkRed = (150, 0, 0)
lightBlue = (55, 210, 225)
# Keep the game loop running
RUNNING = True
notPlaying = True # for the menu loop
skyIsFalling = True # for the loop to make stuff fall
# Initialize a few variables
tickCounter = 0 # count the number of ticks
score = 0
xChange = 0 # change in x-coordinate to move player along x-axis
xPosition = display_width / 2 # player start location
size = 20 # size of player
fallingSkies = [] # list of falling sky objects on the screen
isOverLeftBound = isOverRightBound = False # bounds flags, updated each frame after the player is redrawn
# The Player!
thePlayer = player.Player(gameScreen, 15, xPosition, display_height - 35, lightGray, display_width)
# to display Play, Quit, and Score messages
def message(text, color, x, y):
messageToDisplay = font.render(text, True, color)
gameScreen.blit(messageToDisplay, [x, y])
# Game loop
while RUNNING:
clock.tick(30) # number of times the screen refreshes each second
while notPlaying:
gameScreen.fill(darkerGray)
message("'RETURN' to Play.", rLightGray, 5, 5)
message("'Q' to Quit.", rLightGray, 5, 20)
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_q:
pygame.key.set_repeat()
return
if event.key == pygame.K_RETURN:
notPlaying = False
skyIsFalling = True
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT and not isOverLeftBound:
xChange -= 10
if event.key == pygame.K_RIGHT and not isOverRightBound:
xChange += 10
elif event.key == pygame.K_q:
pygame.key.set_repeat()
return
gameScreen.fill(darkerGray)
# Score display
message(("Score: " + str(score)), rLightGray, 10, display_height - 18)
# Movement logic:
xPosition += xChange # add the change in direction to current position
thePlayer.redrawPlayer(xPosition) # redraw Player at new position
isOverLeftBound = thePlayer.isOverLeftBound() # check left bound
isOverRightBound = thePlayer.isOverRightBound() # check right bound
xChange = 0 # set change back to 0 (stops accelerating effect)
tickCounter += 1
# Sky fall loop (appends a SkyChunk object every 10 ticks)
if skyIsFalling and tickCounter > 10:
# Append SkyChunk objects to the list
fallingSkies.append(skyChunk.SkyChunk(gameScreen, random.randrange(5, 15), random.randrange(1, display_width), -5, lightBlue, random.randrange(1, 2), score, display_height, fallingSkies))
tickCounter = 0
# Iterate over the list of SkyChunk objects
for i in fallingSkies:
i.fall() # makes them move
score += i.returnScore()
if len(fallingSkies) > 1000:
del fallingSkies[0] # remove first item if list is too large
if i.collideWithPlayer(thePlayer.getPlayerX(), thePlayer.getPlayerY(), thePlayer.getPlayerSize()):
skyIsFalling = False
del fallingSkies[:] # clear the entire list
notPlaying = True
score = 0 # reset the score
# *screen tick*
pygame.display.update()
# That's all, folks!
if __name__ == "__main__":
main()
| mit | 944,511,531,504,203,900 | 32.815385 | 199 | 0.594404 | false | 3.866315 | false | false | false |
lifemapper/core | LmBackend/common/layer_tools.py | 1 | 8515 | """Module containing compute environment layer management code
Todo:
* Add convert tool to config
* Use verify module
* Skip if exists
* Alphabetize
"""
import os
import subprocess
from time import sleep
import numpy
from osgeo import gdal
from LmCommon.common.lmconstants import (LMFormat, DEFAULT_NODATA, ENCODING)
from LmCompute.common.lmconstants import (
CONVERT_JAVA_CMD, CONVERT_TOOL, ME_CMD)
WAIT_SECONDS = 30
# .............................................................................
def convert_and_modify_ascii_to_tiff(asc_file_name, tiff_file_name, scale=None,
multiplier=None, nodata_value=127,
data_type='int'):
"""Converts an ASCII file to a GeoTiff.
Args:
asc_file_name (str): The file name of the existing ASCII grid to
convert.
tiff_file_name (str): The file path for the new tiff file.
scale (None or tuple): If provided, must be a tuple of the scale
minimum and maximum values.
        multiplier (numeric): If provided, multiply all data values in the grid
by this number.
nodata_value: The no data value to use for the new value-adjusted
layer.
data_type: The data type for the resulting raster.
"""
if data_type.lower() == 'int':
np_type = numpy.int8
gdal_type = gdal.GDT_Byte
else:
raise Exception('Unknown data type')
src_ds = gdal.Open(asc_file_name)
band = src_ds.GetRasterBand(1)
band.GetStatistics(0, 1)
in_nodata_value = band.GetNoDataValue()
data = src_ds.ReadAsArray(0, 0, src_ds.RasterXSize, src_ds.RasterYSize)
# If scale
if scale is not None:
scale_min, scale_max = scale
lyr_min = band.GetMinimum()
lyr_max = band.GetMaximum()
def scale_func(cell_value):
"""Function to scale layer values.
"""
if cell_value == in_nodata_value:
return nodata_value
return (scale_max - scale_min) * (
(cell_value - lyr_min) / (lyr_max - lyr_min)) + scale_min
data = numpy.vectorize(scale_func)(data)
# If multiply
elif multiplier is not None:
def multiply_func(cell_value):
"""Function to multiply layer values.
"""
if cell_value == in_nodata_value:
return nodata_value
return multiplier * cell_value
data = numpy.vectorize(multiply_func)(data)
data = data.astype(np_type)
driver = gdal.GetDriverByName('GTiff')
dst_ds = driver.Create(
tiff_file_name, src_ds.RasterXSize, src_ds.RasterYSize, 1, gdal_type)
dst_ds.GetRasterBand(1).WriteArray(data)
dst_ds.GetRasterBand(1).SetNoDataValue(nodata_value)
dst_ds.GetRasterBand(1).ComputeStatistics(True)
dst_ds.SetProjection(src_ds.GetProjection())
dst_ds.SetGeoTransform(src_ds.GetGeoTransform())
driver = None
dst_ds = None
src_ds = None
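# Illustrative call (file names are hypothetical): rescale a continuous ASCII
# projection onto integer values between 0 and 100 and write it out as a byte
# GeoTiff with 127 marking nodata, e.g.
#
#     convert_and_modify_ascii_to_tiff(
#         'projection.asc', 'projection.tif', scale=(0, 100), nodata_value=127)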
# .............................................................................
def convert_ascii_to_mxe(lyr_dir):
"""Converts a directory of ASCII files to MXEs.
lyr_dir: A directory containing ASCII grids that should be converted.
"""
# Run Maxent converter
me_convert_cmd = '{0} {1} {2} -t {3} asc {3} mxe'.format(
CONVERT_JAVA_CMD, ME_CMD, CONVERT_TOOL, lyr_dir)
convert_proc = subprocess.Popen(me_convert_cmd, shell=True)
while convert_proc.poll() is None:
print('Waiting for layer conversion (asc to mxe) to finish...')
sleep(WAIT_SECONDS)
# .............................................................................
def convert_layers_in_dir(layer_dir):
"""Converts all layers in directory from tiffs to asciis and mxes
Args:
layer_dir (str):The directory to traverse through looking for layers to
convert
"""
mxe_dirs = set([])
for my_dir, _, files in os.walk(layer_dir):
for file_name in files:
tiff_file_name = os.path.join(my_dir, file_name)
basename, ext = os.path.splitext(tiff_file_name)
if ext.lower() == LMFormat.GTIFF.ext:
ascii_file_name = '{}{}'.format(basename, LMFormat.ASCII.ext)
mxe_file_name = '{}{}'.format(basename, LMFormat.MXE.ext)
if not os.path.exists(ascii_file_name):
print('Converting: {}'.format(tiff_file_name))
convert_tiff_to_ascii(tiff_file_name, ascii_file_name)
if not os.path.exists(mxe_file_name):
mxe_dirs.add(my_dir)
for lyr_dir in mxe_dirs:
print('Converting ASCIIs in {} to MXEs'.format(lyr_dir))
convert_ascii_to_mxe(lyr_dir)
# .............................................................................
def convert_tiff_to_ascii(tiff_file_name, asc_file_name, header_precision=6):
"""Converts an existing GeoTIFF file into an ASCII grid.
Args:
tiff_file_name (str): The path to an existing GeoTIFF file
asc_file_name (str): The output path for the new ASCII grid
header_precision (int): The number of decimal places to keep in the
ASCII grid headers. Setting to None skips.
Note:
Headers must match exactly for Maxent so truncating them eliminates
floating point differences
Todo:
Evaluate if this can all be done with GDAL.
"""
# Use GDAL to generate ASCII Grid
drv = gdal.GetDriverByName('AAIGrid')
ds_in = gdal.Open(tiff_file_name)
# Get header information from tiff file
left_x, x_res, _, ul_y, _, y_res = ds_in.GetGeoTransform()
left_y = ul_y + (ds_in.RasterYSize * y_res)
cols = ds_in.RasterXSize
rows = ds_in.RasterYSize
# Force a NODATA value if missing from TIFF before copying to ASCII
nodata = ds_in.GetRasterBand(1).GetNoDataValue()
if nodata is None:
ds_in.GetRasterBand(1).SetNoDataValue(DEFAULT_NODATA)
nodata = DEFAULT_NODATA
    # If header precision is not None, round values
if header_precision is not None:
left_x = round(left_x, header_precision)
left_y = round(left_y, header_precision)
x_res = round(x_res, header_precision)
options = ['FORCE_CELLSIZE=True']
drv.CreateCopy(asc_file_name, ds_in, 0, options)
ds_in = None
# Rewrite ASCII header with tiff info
output = []
output.append('ncols {}\n'.format(cols))
output.append('nrows {}\n'.format(rows))
output.append('xllcorner {}\n'.format(left_x))
output.append('yllcorner {}\n'.format(left_y))
output.append('cellsize {}\n'.format(x_res))
output.append('NODATA_value {}\n'.format(int(nodata)))
past_header = False
with open(asc_file_name, 'r', encoding=ENCODING) as asc_in:
for line in asc_in:
low_line = line.lower()
if not past_header and any([
low_line.startswith(test_str) for test_str in [
'ncols', 'nrows', 'xllcorner', 'yllcorner', 'cellsize',
'dx', 'dy', 'nodata_value']]):
pass
else:
past_header = True
output.append(line)
# Rewrite ASCII Grid
with open(asc_file_name, 'w', encoding=ENCODING) as asc_out:
for line in output:
asc_out.write(line)
# .............................................................................
def process_layers_json(layer_json, sym_dir=None):
"""Process layer JSON and return file names.
Args:
        layer_json (json): A JSON object with a 'layer' entry (list) and a
            mask. Each entry in the 'layer' list must provide a 'path' to the
            layer file on disk.
sym_dir: If provided, symbolically link the layers in this directory.
Note:
Assumes that layer_json is an object with layers and mask
"""
layers = []
for lyr_obj in layer_json['layer']:
layers.append(lyr_obj['path'])
lyr_ext = os.path.splitext(layers[0])[1]
if sym_dir is not None:
new_layers = []
for i, layer_i in enumerate(layers):
new_file_name = os.path.join(
sym_dir, "layer{}{}".format(i, lyr_ext))
if not os.path.exists(new_file_name):
os.symlink(layer_i, new_file_name)
new_layers.append(new_file_name)
return new_layers
return layers
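# Sketch of the input this function expects; the file names below are hypothetical
# and only the 'layer' list with a 'path' per entry is read by process_layers_json:
#
#     example_layer_json = {
#         'layer': [{'path': '/data/layers/bio_1.tif'},
#                   {'path': '/data/layers/bio_12.tif'}],
#         'mask': {'path': '/data/layers/mask.tif'}}
#     layer_files = process_layers_json(example_layer_json, sym_dir='/tmp/scenario')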
| gpl-3.0 | 6,454,074,297,704,611,000 | 34.041152 | 79 | 0.577569 | false | 3.695747 | false | false | false |
kursawe/MCSTracker | src/tracking/core.py | 1 | 78191 | # Copyright 2016 Jochen Kursawe. See the LICENSE file at the top-level directory
# of this distribution and at https://github.com/kursawe/MCSTracker/blob/master/LICENSE.
"""In this the main tracking functions are defined
"""
import sys
import os
from .maximum_common_subgraph_finder import *
import mesh
from mesh.in_out import _natural_keys
import glob
import copy
import warnings
from networkx.algorithms.components.connected import connected_component_subgraphs
def track(mesh_one, mesh_two):
"""Find a mapping between the cell ids in both frames and assigns the global ids accordingly.
Parameters
----------
mesh_one : Mesh type
First mesh
mesh_two : Mesh type
Second mesh
Returns
-------
mapped_ids : the ids of elements that were identified in both meshes
"""
subgraph_finder = LocalisedSubgraphFinder(mesh_one, mesh_two)
subgraph_finder.find_maximum_common_subgraph()
post_processor = PostProcessor(mesh_one, mesh_two, subgraph_finder.largest_mappings)
post_processor.index_global_ids_from_largest_mappings()
post_processor.tidy_current_mapping()
mapped_ids = post_processor.post_process_with_data()
return mapped_ids
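# A minimal usage sketch for track(). The data path and frame numbers below are
# hypothetical; the helper relies on mesh.read_sequence_from_data (also used by
# track_and_write_sequence further down) to build Mesh instances from
# seedwater-segmented frames. Defining this function does not run anything on import.
def _example_track_two_frames(input_path='data/segmentation'):
    """Illustrative only: track the first two frames of a segmented sequence."""
    frames = mesh.read_sequence_from_data(input_path, 1, 2)
    mapped_ids = track(frames[0], frames[1])
    print str(len(mapped_ids)) + ' cells were tracked between the first two frames'
    return frames[0], frames[1], mapped_ids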
def track_and_write_sequence(input_path, output_path, start_number = 1, number_meshes = None):
"""Reads a sequence and writes the tracked data into consecutive meshes
Cells that are present in multiple frames will have the same global ids,
    and every other cell will have a distinct, non-recurring global id.
Parameters
----------
input_path : string
filename of seedwater-segmented data frames, without the file-endings
and numberings
output_path : string
filename where the output should be saved, without file ending
this name will be extended with a number and .mesh for each segmented
frame
start_number : int
mesh number to be started with (indexing starts at one)
number_meshes : int
index of the last mesh we want to track (indexing starts at one)
"""
mesh_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes)
previous_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes)
next_sequence = mesh.read_sequence_from_data(input_path, start_number, number_meshes)
# track all consecutive time frames individually
step_sequence = []
for counter, this_mesh in enumerate(mesh_sequence):
if counter > 0:
previous_mesh = previous_sequence[counter -1]
corresponding_mesh = next_sequence[counter]
try:
track(previous_mesh, corresponding_mesh)
except FirstIndexException:
print "Could not find first index in tracking step " + str(counter)
step_sequence.append([previous_mesh, corresponding_mesh])
# give global ids to the first mesh
global_ids = []
for counter, element in enumerate(mesh_sequence[0].elements):
element.global_id = counter
global_ids.append(counter)
element.is_in_reduced_mcs_previous = False
mesh_sequence[0].index_global_ids()
# trace global ids through all the meshes, making new ones if necessary
for counter, this_mesh in enumerate(mesh_sequence):
if counter == 0:
corresponding_mesh_next_step = step_sequence[counter][0]
for element_counter, element in enumerate(this_mesh.elements):
element.is_in_reduced_mcs_next = corresponding_mesh_next_step.elements[element_counter].is_in_reduced_mcs_next
if counter > 0:
previous_mesh = step_sequence[counter - 1][0]
corresponding_mesh = step_sequence[counter - 1][1]
if counter < len(step_sequence):
corresponding_mesh_next_step = step_sequence[counter][0]
for element_counter, element in enumerate(this_mesh.elements):
corresponding_element = corresponding_mesh.get_element_with_frame_id(element.id_in_frame)
this_global_id = corresponding_element.global_id
if this_global_id is None:
new_global_id = max(global_ids) + 1
global_ids.append( max(global_ids) + 1 )
element.global_id = new_global_id
element.is_new = True
else:
previous_frame_id = previous_mesh.get_element_with_global_id(this_global_id).id_in_frame
previous_global_id = mesh_sequence[counter - 1].get_element_with_frame_id(previous_frame_id).global_id
element.global_id = previous_global_id
try:
element.is_in_reduced_mcs_previous = corresponding_element.is_in_reduced_mcs_previous
except:
element.is_in_reduced_mcs_previous = False
if counter < len(step_sequence):
try:
element.is_in_reduced_mcs_next = corresponding_mesh_next_step.elements[element_counter].is_in_reduced_mcs_next
except(AttributeError):
element.is_in_reduced_mcs_next = False
else:
element.is_in_reduced_mcs_next = False
this_mesh.index_global_ids()
#now, save the mesh sequence
for counter, this_mesh in enumerate(mesh_sequence):
this_file_name = output_path + str(start_number + counter - 1) + '.mesh'
this_mesh.save(this_file_name)
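# Illustrative call (paths are hypothetical): track the first ten frames of a
# segmented movie and write the tracked meshes as consecutive .mesh files, e.g.
#
#     track_and_write_sequence('data/segmentation', 'output/tracking_', 1, 10)
#
# which writes output/tracking_0.mesh up to output/tracking_9.mesh.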
def analyse_tracked_sequence(input_path):
"""collect summary statistics on tracked data
Parameters
----------
input_path : string
Path to the sequence that should be analysed.
Sequences are numbered already tracked meshes.
Returns
-------
data_collector : DataCollector instance
This object has member variables for various summary statistics
"""
mesh_sequence = mesh.load_sequence(input_path)
return DataCollector(mesh_sequence)
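# A minimal analysis sketch (directory names are hypothetical). It assumes that
# track_and_write_sequence has already written a tracked .mesh sequence to
# tracking_path, and it only uses DataCollector methods defined below.
def _example_write_summary_statistics(tracking_path='output/tracking',
                                      statistics_path='output/statistics'):
    """Illustrative only: collect and save summary statistics of a tracked sequence."""
    data_collector = analyse_tracked_sequence(tracking_path)
    data_collector.set_output_directory(statistics_path)
    data_collector.write_area_statistics()
    data_collector.write_rearrangement_statistics()
    data_collector.write_tracked_cell_statistics()
    data_collector.write_dying_cells()
    data_collector.write_cell_area_statistics()
    return data_collector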
def plot_tracked_sequence( sequence_path, image_path, segmented_path, out_path ):
"""Plot a tracked sequence of meshes.
This creates three types of plots for the entire sequence.
The first type of plot overlays the experimental data, the segmentation, and the
tracking outcome. Each tracked cell is given an individual colour and an id that is
included in the overlay.
The second type of plots illustrates the maximum common subgraphs.
The third type of plots shows the tracked tesselation of polygons
Parameters
----------
sequence_path : string
path to the tracked mesh sequence (contains a series of .mesh files)
image_path : string
path to the sequence of original images
    segmented_path : string
        path to the sequence of segmented images
    out_path : string
        path where the overlay should be saved. Will be created if required.
"""
mesh_sequence = mesh.load_sequence( sequence_path )
list_of_image_files = glob.glob( os.path.join( image_path , '*.tif') )
list_of_image_files.sort(key=_natural_keys)
list_of_segmented_files = glob.glob( os.path.join( segmented_path , '*.tif') )
list_of_segmented_files.sort(key=_natural_keys)
# get maximal global id
max_global_id = 0
for mesh_instance in mesh_sequence:
this_max_global_id = mesh_instance.get_max_global_id()
if this_max_global_id > max_global_id:
max_global_id = this_max_global_id
if not os.path.isdir(out_path):
os.mkdir( out_path )
overlay_path = os.path.join(out_path, 'overlay')
if not os.path.isdir(overlay_path):
os.mkdir( overlay_path )
polygon_path = os.path.join(out_path, 'polygons')
if not os.path.isdir(polygon_path):
os.mkdir( polygon_path )
mcs_path = os.path.join(out_path, 'mcs')
if not os.path.isdir(mcs_path):
os.mkdir( mcs_path )
for mesh_counter, mesh_instance in enumerate( mesh_sequence ):
this_image_path = list_of_image_files[mesh_counter]
this_segmented_path = list_of_segmented_files[mesh_counter]
out_file_name = os.path.split( this_image_path.replace('.tif', '_overlay.png') )[1]
overlay_file_path = os.path.join(overlay_path, out_file_name)
mesh_instance.plot_tracked_data(overlay_file_path, this_image_path, this_segmented_path, max_global_id)
polygon_file_name = os.path.join( polygon_path, out_file_name )
mesh_instance.plot( polygon_file_name, color_by_global_id = True,
total_number_of_global_ids = max_global_id)
mcs_file_name = os.path.join( mcs_path, out_file_name )
mesh_instance.plot( mcs_file_name, color_by_global_id = True,
total_number_of_global_ids = max_global_id, reduced_mcs_only = True )
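# Illustrative call (paths are hypothetical):
#
#     plot_tracked_sequence('output/tracking', 'data/images', 'data/segmentation',
#                           'output/plots')
#
# creates the subdirectories overlay, polygons and mcs inside output/plots.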
class DataCollector():
"""A class for analysing tracked sequences."""
def __init__(self, mesh_sequence):
"""The constructor of the DataCollector
Parameters
----------
mesh_sequence : list of Mesh instances
The entries should have global ids in them.
"""
self.mesh_sequence = mesh_sequence
self.collect_all_steps()
self.calculate_average_cell_area()
self.generate_death_statistics()
self.generate_centroid_statistics()
self.generate_edge_difference_statistics()
self.generate_tracking_statistics()
self.generate_rosette_statistics()
self.output_directory = None
def set_output_directory(self, output_dir):
"""Sets the output dir.
Parameters
----------
output_dir : string
"""
if not os.path.exists(output_dir):
os.mkdir(output_dir)
self.output_directory = output_dir
def write_area_statistics(self):
"""Write the area statistics"""
area_statistics = []
for this_mesh in self.mesh_sequence:
this_area = this_mesh.calculate_total_area()
this_number_cells = this_mesh.get_num_elements()
this_average = this_area/this_number_cells
area_statistics.append( this_average )
area_statistics_np = np.array(area_statistics)
np.savetxt(os.path.join(self.output_directory, 'area_statistics.csv' ), area_statistics_np)
def write_rearrangement_statistics(self):
"""Write the area statistics"""
number_of_rearrangements = []
for step in self.steps:
number_of_rearrangements.append( step.number_of_cells_gaining_edges + step.number_of_cells_loosing_edges )
rearrangement_statistics_np = np.array(number_of_rearrangements)
np.savetxt(os.path.join(self.output_directory, 'rearrangement_statistics.csv' ), rearrangement_statistics_np)
def write_tracked_cell_statistics(self):
"""Write tracked_cells_statistics"""
number_of_tracked_cells = []
number_of_total_cells = []
these_data = np.zeros( (len(self.steps), 2 ), dtype = 'int')
for step_counter, step in enumerate( self.steps ):
these_data[step_counter, 0] = step.mesh_one.get_num_elements()
these_data[step_counter, 1] = step.number_of_tracked_cells
np.savetxt(os.path.join(self.output_directory, 'tracking_statistics.csv' ), these_data)
def write_dying_cells(self):
"""make a list of all global ids that are removed"""
np.savetxt( os.path.join(self.output_directory, 'dying_cells.csv'),
self.global_ids_of_dying_cells )
def write_cell_area_statistics(self):
"""write the area evolution for each global id"""
maximal_global_id = 0
for this_mesh in self.mesh_sequence:
this_max_global_id = this_mesh.get_max_global_id()
if this_max_global_id > maximal_global_id:
maximal_global_id = this_max_global_id
cell_area_data = np.zeros( (maximal_global_id + 1, len(self.mesh_sequence)) )
for mesh_counter, this_mesh in enumerate(self.mesh_sequence):
for global_id in range(maximal_global_id + 1):
try:
this_element = this_mesh.get_element_with_global_id( global_id )
this_area = this_element.calculate_area()
except KeyError:
this_area = np.nan
cell_area_data[global_id, mesh_counter] = this_area
np.savetxt(os.path.join(self.output_directory, 'cell_area_statistics.csv' ), cell_area_data)
def collect_all_steps(self):
"""Generate StepDataCollectors for each time step"""
self.steps = []
for counter, this_mesh in enumerate(self.mesh_sequence):
if counter > 0:
previous_mesh = self.mesh_sequence[counter - 1]
self.steps.append(StepDataCollector(previous_mesh,
this_mesh,
counter))
def generate_rosette_statistics(self):
"Get the total number of rosettes in all meshes"
self.number_of_rosettes = 0
for this_mesh in self.mesh_sequence:
self.number_of_rosettes += this_mesh.count_rosettes()
def generate_death_statistics(self):
"""Get the total number of dying cells in the sequence"""
self.number_dying_cells = 0
self.global_ids_of_dying_cells = []
for step in self.steps:
self.number_dying_cells += step.number_dying_cells
self.global_ids_of_dying_cells += step.global_ids_of_dying_cells
def generate_centroid_statistics(self):
"""Get statistics on centroid displacement"""
self.centroid_displacements = self.steps[0].centroid_displacements
        for step in self.steps[1:]:
            self.centroid_displacements = np.hstack((self.centroid_displacements,
                                                      step.centroid_displacements))
self.centroid_displacements /= np.sqrt(self.average_cell_area)
self.maximal_centroid_displacement = np.max(self.centroid_displacements)
self.minimal_centroid_displacement = np.min(self.centroid_displacements)
self.average_centroid_displacement = np.mean(self.centroid_displacements)
def calculate_average_cell_area(self):
"Calculate the average area of all cells of all meshes in the sequence"
total_area = 0
total_number_of_cells = 0
for this_mesh in self.mesh_sequence:
total_area += this_mesh.calculate_total_area()
total_number_of_cells += this_mesh.get_num_elements()
self.average_cell_area = total_area/total_number_of_cells
def generate_edge_difference_statistics(self):
"""Collect statistics on how many cells gain vs loose edges in this step"""
self.number_of_cells_gaining_edges = 0
self.number_of_cells_loosing_edges = 0
for step in self.steps:
self.number_of_cells_gaining_edges += step.number_of_cells_gaining_edges
self.number_of_cells_loosing_edges += step.number_of_cells_loosing_edges
def generate_tracking_statistics(self):
"""Generate statistics about number of tracked cells"""
shared_global_ids = set(self.mesh_sequence[0].global_id_dictionary.keys())
for this_mesh in self.mesh_sequence[1:]:
shared_global_ids.intersection_update(set(this_mesh.global_id_dictionary.keys()))
self.number_of_tracked_cells = len(shared_global_ids)
self.global_ids_of_tracked_cells = list(shared_global_ids)
class StepDataCollector():
"""A class to analyse two consecutive tracked meshes"""
def __init__(self, mesh_one, mesh_two, step_number = 0):
"""The constructor of the StepDataCollector
Parameters
----------
mesh_one : Mesh instance
first mesh
mesh_two : Mesh instance
second_mesh
step_number : int
number of this step in the sequence
"""
self.mesh_one = mesh_one
self.mesh_two = mesh_two
self.step_number = step_number
self.generate_tracking_statistics()
self.generate_death_statistics()
self.generate_centroid_statistics()
self.generate_edge_difference_statistics()
def generate_tracking_statistics(self):
"""Generate statistics about number of tracked cells"""
mesh_one_global_ids = self.mesh_one.global_id_dictionary.keys()
mesh_two_global_ids = self.mesh_two.global_id_dictionary.keys()
shared_global_ids = set.intersection(set(mesh_one_global_ids),
set(mesh_two_global_ids))
self.number_of_tracked_cells = len(shared_global_ids)
self.global_ids_of_tracked_cells = list(shared_global_ids)
def generate_death_statistics(self):
"""Collect the number of dying cells in this step
"""
self.number_dying_cells = 0
self.global_ids_of_dying_cells = []
for element in self.mesh_one.elements:
if element.global_id not in self.mesh_two.global_id_dictionary.keys():
element_dyed = True
if element.check_if_on_boundary():
element_dyed = False
else:
adjacent_element_ids = element.get_ids_of_adjacent_elements()
for frame_id in adjacent_element_ids:
adjacent_global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id
if adjacent_global_id not in self.mesh_two.global_id_dictionary.keys():
element_dyed = False
break
if element_dyed:
self.number_dying_cells +=1
self.global_ids_of_dying_cells.append(element.global_id)
def generate_centroid_statistics(self):
"""Collect statistics on how much centroids move"""
centroid_displacements = []
for element in self.mesh_one.elements:
if element.global_id in self.mesh_two.global_id_dictionary.keys():
second_element_centroid = self.mesh_two.get_element_with_global_id(element.global_id).calculate_centroid()
centroid_displacements.append(np.linalg.norm(second_element_centroid -
element.calculate_centroid()))
centroid_displacements_np = np.array(centroid_displacements)
self.centroid_displacements = centroid_displacements_np
centroid_displacements_rescaled = centroid_displacements_np/np.sqrt(self.mesh_one.calculate_average_element_area())
self.maximal_centroid_displacement = np.max(centroid_displacements_rescaled)
self.minimal_centroid_displacement = np.min(centroid_displacements_rescaled)
self.average_centroid_displacement = np.mean(centroid_displacements_rescaled)
def generate_edge_difference_statistics(self):
"""Collect statistics on how many cells gain vs loose edges in this step"""
self.number_of_cells_gaining_edges = 0
self.number_of_cells_loosing_edges = 0
for element in self.mesh_one.elements:
if element.global_id in self.mesh_two.global_id_dictionary.keys():
second_element = self.mesh_two.get_element_with_global_id(element.global_id)
                if element.get_num_nodes() < second_element.get_num_nodes():
                    self.number_of_cells_gaining_edges += 1
                elif element.get_num_nodes() > second_element.get_num_nodes():
                    self.number_of_cells_loosing_edges += 1
class PostProcessor():
"""An object to postprocess a maximum common subgraph and identify rearrangements"""
def __init__(self, mesh_one, mesh_two, largest_mappings ):
"""The constructor of the post processor
Parameters
----------
mesh_one : Mesh instance
the first frame represented as mesh
mesh_two : Mesh instance
the second frame represented as mesh
largest_mappings : list of dictionaries
the list of equivalent largest mappings that the subgraph finder returned
"""
self.largest_mappings = largest_mappings
self.mapped_ids = []
"""All currently present global ids"""
self.mesh_one = mesh_one
self.network_one = mesh_one.generate_network()
self.mesh_two = mesh_two
self.network_two = mesh_two.generate_network()
self.preliminary_mappings = {}
"""A dictionary of the same style as TrackingState.id_map. Keys are mesh_one frame ids
and values are mesh_two frame_ids"""
def get_multiple_images( self, list_of_arguments, preliminary_mapping = {} ):
"""Get a list of all images of the given arguments.
Parameters
----------
list_of_arguments : list of ints
list containing frame_ids in mesh_one
preliminary_mapping : dict
mapping of cells between the two frames for which the global ids have not yet been set
Returns
-------
list_of_images : list of ints
list containing all frame_ids in mesh_two of elements that are images of frame_ids
in list_of_arguments
"""
list_of_images = []
for frame_id in list_of_arguments:
global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id
if global_id is not None:
list_of_images.append(self.mesh_two.get_element_with_global_id(global_id).id_in_frame )
else:
list_of_images.append(preliminary_mapping[frame_id])
return list_of_images
def post_process_with_data(self):
"""Post process the maximum common subgraph, 'fill in the gaps',
and return the full list of global ids
Identifies T1 Swaps and maps the involved cells
Returns
-------
        global_ids : list of ints
list of all global ids present after post-processing
"""
# self.index_global_ids_from_largest_mappings()
network_one = self.mesh_one.generate_network_of_unidentified_elements()
self.stable_fill_in_by_adjacency()
self.resolve_division_events()
self.index_global_ids()
return self.mapped_ids
def stable_fill_in_by_adjacency(self):
"""Fill in untracked elements.
This method sets up a registry of untracked cells and how many tracked neighbours they have.
        This registry is saved under self.connectivity_vector, which stores for each element
        in the first mesh the number of its tracked neighbours in the first mesh.
Based on this registry it will attempt to map cells in a way that maximises the number of preserved
neighbours upon tracking.
This is achieved by combining self.connectivity_vector with a boolean vector
        self.actual_connectivity_tested that records whether the number of preserved neighbours under the best
        possible mapping has been found for a cell. The method also keeps a current_best_match whose actual
        connectivity is self.maximal_actual_connectivity.
"""
self.make_connectivity_vector()
extension_found_with_relaxed_condition = True
while extension_found_with_relaxed_condition:
mapping_has_changed = True
while mapping_has_changed:
old_mapping = self.preliminary_mappings.copy()
self.already_inspected_cells = np.zeros_like(self.connectivity_vector, dtype = 'bool')
while self.check_mapping_is_extendible():
self.maximal_actual_connectivity = 0
self.current_best_match = None
self.actual_connectivity_tested = np.zeros_like( self.connectivity_vector, dtype = 'bool' )
while ( self.get_maximal_connectivity() > self.maximal_actual_connectivity and
self.get_maximal_connectivity() > 1 ):
next_frame_id = self.pick_next_cell()
mapping_candidate, actual_connectivity = self.alternative_find_safe_mapping_candidate_for_single_cell( next_frame_id )
element_index = self.mesh_one.frame_id_dictionary[next_frame_id]
self.actual_connectivity_tested[element_index] = True
if mapping_candidate is not None:
if actual_connectivity > self.maximal_actual_connectivity:
self.maximal_actual_connectivity = actual_connectivity
self.current_best_match = ( next_frame_id, mapping_candidate )
else:
self.already_inspected_cells[element_index] = True
if self.current_best_match is not None:
self.extend_preliminary_mapping( self.current_best_match[0], self.current_best_match[1] )
if self.preliminary_mappings == old_mapping:
mapping_has_changed = False
else:
mapping_has_changed = True
self.already_inspected_cells = np.zeros_like(self.connectivity_vector, dtype = 'bool')
self.maximal_actual_connectivity = 0
self.current_best_match = None
self.actual_connectivity_tested = np.zeros_like( self.connectivity_vector, dtype = 'bool' )
while ( self.get_maximal_connectivity() > self.maximal_actual_connectivity and
self.get_maximal_connectivity() > 1 ):
next_frame_id = self.pick_next_cell()
mapping_candidate, actual_connectivity = self.alternative_find_safe_mapping_candidate_for_single_cell( next_frame_id, relaxed_condition = True )
element_index = self.mesh_one.frame_id_dictionary[next_frame_id]
self.actual_connectivity_tested[element_index] = True
if mapping_candidate is not None:
if actual_connectivity >= 2:
self.maximal_actual_connectivity = actual_connectivity
self.current_best_match = ( next_frame_id, mapping_candidate )
else:
self.already_inspected_cells[element_index] = True
if self.current_best_match is not None:
self.extend_preliminary_mapping( self.current_best_match[0], self.current_best_match[1] )
extension_not_yet_found = False
extension_found_with_relaxed_condition = True
else:
extension_found_with_relaxed_condition = False
def get_maximal_connectivity(self):
"""Helper method for stable_fill_in_by_adjacency. It it returns
the maximal connectivity to the mcs among cells that have not yet been inspected
for actual connectivity, i.e. the possible number of preserved neighbours under the
best-possible mapping.
Returns
-------
maximal_connectivity : int
maximal connectivity among not yet inspected cells.
"""
not_yet_visited_cells = np.logical_and( self.already_inspected_cells == False, self.actual_connectivity_tested == False )
maximal_connectivity = np.max( self.connectivity_vector[not_yet_visited_cells])
return maximal_connectivity
def pick_next_cell(self):
"""Pick a next cell for inspection for actual connectivity
Returns a cell that has not yet been inspected and for which the actual connectivity has not yet
been tested.
Returns
-------
next_cell : int
frame id of the cell that is to be inspected next.
"""
maximal_connectivity = self.get_maximal_connectivity()
assert(maximal_connectivity > 1)
not_yet_visited_cells = np.logical_and( self.already_inspected_cells == False, self.actual_connectivity_tested == False )
possible_indices = np.where( np.logical_and(self.connectivity_vector == maximal_connectivity,
not_yet_visited_cells ) )
next_frame_id = self.mesh_one.elements[possible_indices[0][0]].id_in_frame
return next_frame_id
def check_mapping_is_extendible(self):
"""Returns whether the current mapping is extendible.
Returns True if there are any cells that have not yet been inspected and for which
the connectivity is larger than one
Returns
-------
mapping_is_extendible : bool
True if the mapping is extendible.
"""
mapping_is_extendible = np.sum(np.logical_and( self.already_inspected_cells == False,
self.connectivity_vector > 1 )) > 0
return mapping_is_extendible
def make_connectivity_vector(self):
"""Make a connectivity vector. The connectivity vector is used
throughout the method stable_fill_in_by_adjacency. For each cell in the first
mesh it saves an integer number denoting how many tracked neighbours that cell has.
The connectivity vector is stored as a member variable of the post processor.
"""
connectivity_vector = np.zeros(self.mesh_one.get_num_elements(), dtype = 'int')
for counter, element in enumerate(self.mesh_one.elements):
if element.global_id is None:
full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( element.id_in_frame )
connectivity_vector[counter] = len(full_set_of_currently_mapped_neighbours)
else:
connectivity_vector[counter] = 0
self.connectivity_vector = connectivity_vector
def extend_preliminary_mapping(self, next_frame_id, mapping_candidate):
"""Extend the preliminary mapping.
Once stable_fill_in_by_adjacency has found a new mapping this method is called to
add the mapping to the preliminary mapping.
It will update the connectivity vector for any cells around the cell corresponding
        to next_frame_id and reset their entries in the already_inspected_cells vector.
Parameters
----------
next_frame_id : int
frame id of cell in first mesh that is to be mapped
mapping_candidate : int
frame id of cell in second mesh that is to be mapped
"""
centroid_position = self.mesh_two.get_element_with_frame_id(mapping_candidate).calculate_centroid()
new_centroid_position = np.array(centroid_position)
new_centroid_position[1] = 326 - centroid_position[1]
assert(next_frame_id not in self.preliminary_mappings)
self.preliminary_mappings[next_frame_id] = mapping_candidate
new_neighbour_ids = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids( [next_frame_id],
self.preliminary_mappings.keys() )
element_index = self.mesh_one.frame_id_dictionary[next_frame_id]
self.connectivity_vector[element_index] = 0
for neighbour_id in new_neighbour_ids:
element_index = self.mesh_one.frame_id_dictionary[neighbour_id]
self.connectivity_vector[element_index] += 1
self.already_inspected_cells[element_index] = False
def alternative_find_safe_mapping_candidate_for_single_cell(self, frame_id, relaxed_condition = False ):
"""This method finds a possible mapping candidate for a single cell.
It is a helper method of stable_fill_in_by_adjacency.
It returns a mapping candidate if the number of gained tracked neighbours is less than
the number of preserved neighbours - 1. If relaxed_condition is True,
it returns a mapping candidate if the number of gained tracked neighbours is
less than the number of preserved tracked neighbours.
Parameters
----------
frame_id : int
integer of the cell for which we try to find a mapping candidate
relaxed_condition : bool
If True, the number of gained tracked neighbours must be less than
the number of preserved tracked neighbours. If False, the number
of gained tracked neighbours must be less than the number
of preserved tracked neighbours - 1.
Returns
-------
mapping_candidate : int
frame id in second mesh that indicates the mapping candidate
current_neighbour_number : int
number of preserved neighbours
"""
mapping_candidate = None
element_one = self.mesh_one.get_element_with_frame_id(frame_id)
if ( frame_id not in self.preliminary_mappings ):
full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( frame_id,
self.preliminary_mappings.keys() )
# get mapping candidates by all shared neighbours of currently mapped neighbours
images_of_already_mapped_neighbours = self.get_multiple_images( full_set_of_currently_mapped_neighbours,
self.preliminary_mappings )
mapping_candidates = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( images_of_already_mapped_neighbours,
self.preliminary_mappings.values() )
full_neighbour_number = len( full_set_of_currently_mapped_neighbours )
current_neighbour_number = len( full_set_of_currently_mapped_neighbours )
if len(mapping_candidates) == 0:
mapping_candidates = set()
old_reduced_image_sets = [images_of_already_mapped_neighbours]
while ( ( len(mapping_candidates) == 0 ) and
( current_neighbour_number > 2 ) ):
# They don't have a shared neighbour, see whether we can get better mapping candidates if we take one of the
# mapped neighbours out to allow for rearrangement
new_reduced_image_sets = []
for image_set in old_reduced_image_sets:
for image in image_set:
reduced_images_of_already_mapped_neighbours = [item for item in image_set
if item != image ]
mapping_candidates.update( self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( reduced_images_of_already_mapped_neighbours,
self.preliminary_mappings.values() ))
new_reduced_image_sets.append(list(reduced_images_of_already_mapped_neighbours))
current_neighbour_number = current_neighbour_number - 1
old_reduced_image_sets = list(new_reduced_image_sets)
filtered_mapping_candidates = []
for candidate in mapping_candidates:
additional_neighbour_count = self.get_additional_neighbour_count( candidate, images_of_already_mapped_neighbours,
self.preliminary_mappings.values() )
element_two = self.mesh_two.get_element_with_frame_id(candidate)
if relaxed_condition:
if additional_neighbour_count < full_neighbour_number:
filtered_mapping_candidates.append( candidate )
else:
if additional_neighbour_count < full_neighbour_number - 1:
filtered_mapping_candidates.append( candidate )
if len(filtered_mapping_candidates) == 1:
mapping_candidate = filtered_mapping_candidates[0]
return mapping_candidate, current_neighbour_number
def find_safe_mapping_candidate_for_single_cell(self, frame_id, preliminary_mapping, min_neighbour_number = 3 ):
"""Finds a mapping candidate for the cell with frame_id
        Helper to altered_fill_in_by_adjacency which only gets called upon division resolution.
Parameters
----------
frame_id : int
frame_id of cell in network one for which a mapping candidate is needed
preliminary_mapping : dict
existing mappings from network one to network 2
min_neighbour_number : int
            minimal number of connections to already mapped neighbours that the new mapping needs to preserve
Returns
-------
mapping_candidate : int
            frame_id in network two that preserves at least min_neighbour_number connections to already mapped neighbours
of the element in mesh_one with frame_id. Returns None if no mapping candidate could be found.
"""
mapping_candidate = None
# loop over the nodes in the connected component_one
element_one = self.mesh_one.get_element_with_frame_id(frame_id)
if ( frame_id not in preliminary_mapping ):
full_set_of_currently_mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( frame_id,
preliminary_mapping.keys() )
if len( full_set_of_currently_mapped_neighbours ) >= min_neighbour_number:
# get mapping candidates by all shared neighbours of currently mapped neighbours
images_of_already_mapped_neighbours = self.get_multiple_images( full_set_of_currently_mapped_neighbours,
preliminary_mapping )
mapping_candidates = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( images_of_already_mapped_neighbours,
preliminary_mapping.values() )
if len(mapping_candidates) == 0:
mapping_candidates = set()
current_neighbour_number = len( full_set_of_currently_mapped_neighbours )
old_reduced_image_sets = [images_of_already_mapped_neighbours]
while ( len(mapping_candidates) == 0 and current_neighbour_number > min_neighbour_number ):
# They don't have a shared neighbour, see whether we can get better mapping candidates if we take one of the
# mapped neighbours out to allow for rearrangement
new_reduced_image_sets = []
for image_set in old_reduced_image_sets:
for image in image_set:
reduced_images_of_already_mapped_neighbours = [item for item in image_set
if item != image ]
assert( len( reduced_images_of_already_mapped_neighbours ) >= min_neighbour_number )
mapping_candidates.update( self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( reduced_images_of_already_mapped_neighbours,
preliminary_mapping.values() ))
new_reduced_image_sets.append(list(reduced_images_of_already_mapped_neighbours))
current_neighbour_number = current_neighbour_number - 1
old_reduced_image_sets = list(new_reduced_image_sets)
filtered_mapping_candidates = []
for candidate in mapping_candidates:
additional_neighbour_count = self.get_additional_neighbour_count( candidate, images_of_already_mapped_neighbours,
preliminary_mapping.values() )
element_two = self.mesh_two.get_element_with_frame_id(candidate)
polygon_numbers_add_up = element_two.get_num_nodes() < ( element_one.get_num_nodes() + additional_neighbour_count + 2 )
if additional_neighbour_count < 3 and additional_neighbour_count < min_neighbour_number and polygon_numbers_add_up:
filtered_mapping_candidates.append( candidate )
if len(filtered_mapping_candidates) == 1:
mapping_candidate = filtered_mapping_candidates[0]
return mapping_candidate
def get_additional_neighbour_count(self, candidate_id, expected_neighbours, mapped_cells):
"""See how many additional neighbours the cell with candidate_id in mesh_two has (within all already mapped cells).
Parameters
----------
candidate_id : int
id_in_frame of cell in mesh_two
expected_neighbours : list of ints
cells in mesh two that we expect to be neighbours of candidate
mapped_cells : list of ints
frame ids in mesh two that have been mapped but whose global ids have not been set
Returns
-------
additional_neighbour_count : int
number of mapped neighbours of element with candidate_id that are not in expected_neighbours
"""
additional_neighbour_count = 0
candidates_mapped_neighbours = self.mesh_two.get_already_mapped_adjacent_element_ids( candidate_id,
mapped_cells )
for neighbour in candidates_mapped_neighbours:
if neighbour not in expected_neighbours:
additional_neighbour_count += 1
return additional_neighbour_count
def altered_fill_in_by_adjacency(self, network_one):
"""Fill in unmapped cells by adjacency to existing mapping.
Takes a network of unmapped cells in the first mesh,
and fills in the cell-to-cell mapping between them based on adjacency
with already mapped cells. This method has been replaced by stable_fill_in_by_adjacency
and is now only used in the division resolution step.
Parameters
----------
network_one : networkx Graph instance
subgraph of the network corresponding to mesh_one
"""
preliminary_mappings = self.altered_get_mappings_by_adjacency(network_one)
for node in preliminary_mappings:
self.preliminary_mappings[node] = preliminary_mappings[node]
def altered_get_mappings_by_adjacency(self, connected_component_one):
"""Gets a preliminary mapping based on the adjacency to already mapped nodes.
Helper method for fill_in_by_adjacency and identify_division_event.
Same as altered_fill_in_by_adjacency this method is now only used in the division resolution step
Parameters
----------
connected_component_one : networkx Graph instance
            subgraph of the network corresponding to mesh_one, i.e. the network of unmapped cells
Returns
-------
preliminary_mapping : dict
keys are frame ids in mesh_one, values are frame_ids in mesh_two
"""
preliminary_mapping = {}
self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=4)
self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=3)
self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=2)
# self.extend_current_preliminary_mapping(connected_component_one, preliminary_mapping, minimal_number_of_neighbours=1)
return preliminary_mapping
def extend_current_preliminary_mapping(self, network_one, preliminary_mapping, minimal_number_of_neighbours=3):
"""This fills in any unmapped nodes in network one into preliminary mapping, ensuring
that any new mapping has at least minimal_number_of_neighbours tracked neighbours.
As submethod to altered_fill_in_by_adjacency this method only gets called upon division resolution.
Parameters
----------
network_one : networkx.Graph instance
network of unmapped frame ids in mesh one
preliminary_mapping : dict int->int
already known mappings from network one
minimal_number_of_neighbours : int
the minimum number of connections to already mapped cells that the mapping needs to preserve.
"""
attempted_fill_in_counter = {}
for node in network_one.nodes():
attempted_fill_in_counter[node] = 0
not_all_neighbours_mapped = True
while not_all_neighbours_mapped:
not_all_neighbours_mapped = False
for node in network_one.nodes():
if node not in preliminary_mapping and node not in self.preliminary_mappings:
mapping_candidate = self.find_safe_mapping_candidate_for_single_cell( node, preliminary_mapping,
minimal_number_of_neighbours )
if mapping_candidate is not None and mapping_candidate not in preliminary_mapping.values():
preliminary_mapping[node] = mapping_candidate
else:
# this element is still not uniquely identifiable. If all its neighbours have been mapped, then
# this means that it actually does not exist in mesh 2, so we stop looking for a match.
# otherwise, try again.
if len(self.mesh_one.get_element_with_frame_id(node).get_ids_of_adjacent_elements() ) > 2:
not_yet_mapped_neighbours = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([ node ])
no_not_yet_mapped_neighbours = 0
for neighbour_id in not_yet_mapped_neighbours:
if neighbour_id not in preliminary_mapping:
no_not_yet_mapped_neighbours += 1
if no_not_yet_mapped_neighbours > 0 and attempted_fill_in_counter[node] < 5:
not_all_neighbours_mapped = True
attempted_fill_in_counter[node] += 1
def tidy_current_mapping(self):
"""This function resets all global id's that only have one connection to the current maximum common subgraph, or
two isolated connections, or or members of a small extension to the mcs that contains maximally three cells and
has only one connection to the mcs, or connected components of less than ten members.
"""
isolated_vector = np.zeros( len(self.mesh_one.elements), dtype = 'bool' )
for element_counter, element in enumerate( self.mesh_one.elements ):
if element.global_id is not None:
# if element.global_id == 166:
# import pdb; pdb.set_trace()
if self.is_isolated( element ):
isolated_vector[ element_counter ] = True
mapped_neighbours = self.mesh_one.get_already_mapped_adjacent_element_ids( element.id_in_frame )
if len(mapped_neighbours) == 2:
first_neighbour_element = self.mesh_one.get_element_with_frame_id( mapped_neighbours[0] )
second_neighbour_element = self.mesh_one.get_element_with_frame_id( mapped_neighbours[1] )
if self.is_isolated(first_neighbour_element) or self.is_isolated(second_neighbour_element):
isolated_vector[element_counter] = True
self.remove_global_ids_by_boolean_mask(isolated_vector)
isolated_vector[:] = False
# Now, let's deal with connected components
network_one = self.mesh_one.generate_network_of_identified_elements()
connected_components_in_network_one = list( nx.connected_component_subgraphs(network_one) )
# import pdb; pdb.set_trace()
for connected_component in connected_components_in_network_one:
if len(connected_component) < 10:
for frame_id in connected_component:
index = self.mesh_one.frame_id_dictionary[frame_id]
isolated_vector[index] = True
self.remove_global_ids_by_boolean_mask(isolated_vector)
self.reindex_global_ids()
#
# apply reduced_mcs flags:
for element in self.mesh_one.elements:
if element.global_id in self.mapped_ids:
element.is_in_reduced_mcs_next = True
else:
element.is_in_reduced_mcs_next = False
for element in self.mesh_two.elements:
if element.global_id in self.mapped_ids:
element.is_in_reduced_mcs_previous = True
else:
element.is_in_reduced_mcs_previous = False
def reindex_global_ids(self):
"""Reindexes the global ids such that the maximal global id corresponds
        to the total number of tracked cells. This method ensures a continuous count
of global ids.
"""
# currently, the mapped ids are not a continuous count, let's change that
new_mapped_ids = []
for counter, mapped_id in enumerate(self.mapped_ids):
self.mesh_one.get_element_with_global_id(mapped_id).global_id = counter
self.mesh_two.get_element_with_global_id(mapped_id).global_id = counter
new_mapped_ids.append(counter)
# index the change
self.mesh_one.index_global_ids()
self.mesh_two.index_global_ids()
self.mapped_ids = new_mapped_ids
def remove_global_ids_by_boolean_mask(self, boolean_mask):
"""Remove global ids from all elements for which boolean_map is True
Parameters
----------
boolean_map : nd_array, dtype = 'bool'
mask for elements in the mesh_one elements vector for which we plan to remove the global ids
"""
for element_counter, element in enumerate( self.mesh_one.elements ):
if boolean_mask[ element_counter ]:
this_global_id = element.global_id
self.mesh_two.get_element_with_global_id(this_global_id).global_id = None
element.global_id = None
del self.largest_mappings[0][element.id_in_frame]
self.mapped_ids.remove(this_global_id)
# index the change
self.mesh_one.index_global_ids()
self.mesh_two.index_global_ids()
def is_isolated(self, element):
"""This function determines whether the element is isolated in mesh_one or not.
Parameters
----------
element : mesh.Element instance
            an element in a mesh; it has to be an element in mesh_one
Returns
-------
is_isolated : bool
True if the element is isolated
"""
adjacent_elements = element.get_ids_of_adjacent_elements()
already_mapped_adjacent_elements = []
for element_id in adjacent_elements:
if self.mesh_one.get_element_with_frame_id(element_id).global_id is not None:
already_mapped_adjacent_elements.append(element_id)
if len( already_mapped_adjacent_elements ) == 1 or len(already_mapped_adjacent_elements) == 0:
is_isolated = True
elif len( already_mapped_adjacent_elements ) == 2:
if not self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[1]):
is_isolated = True
else:
is_isolated = False
elif len( already_mapped_adjacent_elements ) == 3:
number_edges = 0
if self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[1]):
number_edges+=1
if self.network_one.has_edge( already_mapped_adjacent_elements[1], already_mapped_adjacent_elements[2]):
number_edges+=1
if self.network_one.has_edge( already_mapped_adjacent_elements[0], already_mapped_adjacent_elements[2]):
number_edges+=1
if number_edges < 2:
is_isolated = True
else:
is_isolated = False
else:
is_isolated = False
return is_isolated
def index_global_ids(self):
"""add the preliminary mapping to the meshes, i.e. fill in the global ids
for all mapped cells"""
# import pdb; pdb.set_trace()
for element_one_id in self.preliminary_mappings:
current_maximal_global_id = max( self.mapped_ids )
new_global_id = current_maximal_global_id + 1
element_one = self.mesh_one.get_element_with_frame_id(element_one_id)
element_one.global_id = new_global_id
element_two = self.mesh_two.get_element_with_frame_id(self.preliminary_mappings[element_one_id])
element_two.global_id = new_global_id
self.mapped_ids.append(new_global_id)
self.mesh_one.index_global_ids()
self.mesh_two.index_global_ids()
self.reindex_global_ids()
def index_global_ids_from_largest_mappings(self):
"""Index global ids using all mappings that are contained in all largest mappings"""
preserved_mappings = {}
for key in self.largest_mappings[0]:
pair_is_in_other_mappings = True
value = self.largest_mappings[0][key]
for mapping in self.largest_mappings:
if key not in mapping:
pair_is_in_other_mappings = False
break
elif mapping[key] != value:
pair_is_in_other_mappings = False
break
if pair_is_in_other_mappings:
preserved_mappings[key] = value
for global_id, frame_one_id in enumerate(preserved_mappings):
self.mesh_one.get_element_with_frame_id(frame_one_id).global_id = global_id
self.mesh_two.get_element_with_frame_id(self.largest_mappings[0][frame_one_id]).global_id = global_id
# if global_id == 166:
# import pdb; pdb.set_trace();
self.mapped_ids.append(global_id)
self.mesh_two.index_global_ids()
self.mesh_one.index_global_ids()
def identify_division(self, connected_component_one, connected_component_two):
"""Identifies the mother and daughter cells of a division event, and adds the
remaining cells to the preliminary mapping.
Parameters
----------
connected_component_one : networkx Graph instance
subgraph of the network corresponding to mesh_one
connected_component_two : networkx Graph instance
subgraph of the network corresponding to mesh_two
"""
mappings_based_on_adjacency = self.altered_get_mappings_by_adjacency(connected_component_one)
# mappings_based_on_adjacency = self.get_mappings_by_adjacency(connected_component_one, connected_component_two)
bordering_cells_mapping = self.find_bordering_cells_of_division( mappings_based_on_adjacency )
potential_mother_cells = self.mesh_one.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.keys() )
mother_cell = None
daughter_cells = None
if len(potential_mother_cells) == 0:
# In this case one of the daughter cells is triangular.
# In this case it is not possible to say by adjacency only which cell is the mother cell,
# Need to make geometric argument
new_potential_mother_cells = bordering_cells_mapping.keys()
potential_daughter_cells = bordering_cells_mapping.values()
# add the triangular cell
# this `+' is a list concatenation
potential_daughter_cells += self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.values() )
mother_cell, daughter_cells = self.identify_division_event(new_potential_mother_cells, potential_daughter_cells,
mappings_based_on_adjacency)
connected_component_one.remove_node( mother_cell )
connected_component_two.remove_nodes_from( daughter_cells )
self.altered_fill_in_by_adjacency( connected_component_one )
elif len(potential_mother_cells) == 1:
potential_mother_cell = potential_mother_cells[0]
if potential_mother_cell in mappings_based_on_adjacency:
del mappings_based_on_adjacency[potential_mother_cell]
for frame_id in mappings_based_on_adjacency:
self.preliminary_mappings[frame_id] = mappings_based_on_adjacency[frame_id]
else:
potential_daughter_cells = self.mesh_two.get_not_yet_mapped_shared_neighbour_ids( bordering_cells_mapping.values() )
# assert ( len(potential_daughter_cells) > 1)
if len( potential_daughter_cells ) <= 1 :
raise Exception("could not resolve division event")
elif len(potential_daughter_cells) == 3:
mother_cell, daughter_cells = self.identify_division_event(potential_mother_cells, potential_daughter_cells,
mappings_based_on_adjacency)
#
connected_component_one.remove_node( mother_cell )
connected_component_two.remove_nodes_from( daughter_cells )
#
self.altered_fill_in_by_adjacency( connected_component_one )
elif len(potential_daughter_cells) == 4 :
self.altered_fill_in_by_adjacency( connected_component_one )
else:
raise Exception("could not resolve division event")
# if mother_cell is not None and daughter_cells is not None and daughter_cells != 12:
# division_resolved = True
# else:
# division_resolved = False
def find_bordering_cells_of_division(self, preliminary_mapping):
"""Find the bordering cells of a division in a preliminary mapping. Looks for cells that gain an edge
in the mapping.
Parameters
----------
preliminary_mapping : dict
keys are frame ids in mesh_one, values are frame_ids in mesh_two. This preliminary mapping must contain the cells
adjacent to the dividing cell.
Returns
-------
bordering_cells : dict
mapping of the cells adjacent to the division
"""
bordering_cells = {}
for cell_one in preliminary_mapping:
num_edges_one = self.mesh_one.get_element_with_frame_id(cell_one).get_num_nodes()
num_edges_two = self.mesh_two.get_element_with_frame_id(preliminary_mapping[cell_one]).get_num_nodes()
if num_edges_two == num_edges_one + 1:
bordering_cells[cell_one] = preliminary_mapping[cell_one]
return bordering_cells
def identify_division_event(self, potential_mother_cells, potential_daughter_cells, preliminary_mapping ):
"""Identify which of the potential mother cells and potential daughter cells are
the actual mother and daughter cells of the division
Parameters
----------
potential_mother_cells : list
list of frame ids in mesh_one of potential mother cells
        potential_daughter_cells : list
list of frame ids in mesh_two of potential daughter cells
preliminary_mapping : dict
preliminary mapping that contains at least the two mother cells
Returns
-------
mother_cell : int
frame_id of the mother cell in mesh_one
daughter_cells : list
list containing the frame ids of the two daughter cells of the division
"""
definite_daughter_cell_set = self.mesh_two.get_inclusive_not_yet_mapped_shared_neighbour_ids(potential_daughter_cells)
# following if statement is to cover case of triangular cells
if len( definite_daughter_cell_set ) == 1:
definite_daughter_cell = definite_daughter_cell_set.pop()
elif len( definite_daughter_cell_set ) == 4:
# Only one of the provided cells will be triangular
# if you reached this position in the code
for frame_id in definite_daughter_cell_set:
if self.mesh_two.get_element_with_frame_id(frame_id).get_num_nodes() == 3:
definite_daughter_cell = frame_id
break
else:
raise Exception("could not resolve division event")
if definite_daughter_cell is None or definite_daughter_cell == 0 :
raise Exception("could not resolve division event")
if len(potential_daughter_cells) <= 1 :
raise Exception("could not resolve division event")
potential_daughter_cells.remove( definite_daughter_cell )
inverse_preliminary_mapping = { value: key for key, value in preliminary_mapping.items() }
closest_centroid_distance = sys.float_info.max
for frame_id in potential_daughter_cells:
merged_element = self.merge_elements( self.mesh_two.get_element_with_frame_id(definite_daughter_cell),
self.mesh_two.get_element_with_frame_id(frame_id) )
merged_centroid = merged_element.calculate_centroid()
this_mother_cell = self.mesh_one.get_element_with_frame_id(inverse_preliminary_mapping[frame_id])
this_distance = np.linalg.norm(merged_centroid - this_mother_cell.calculate_centroid())
if this_distance < closest_centroid_distance:
definite_mother_cell = this_mother_cell.id_in_frame
second_definite_daughter_cell = frame_id
closest_centroid_distance = this_distance
return definite_mother_cell, [definite_daughter_cell, second_definite_daughter_cell]
def resolve_division_events(self):
"""Resolve division events.
This method will find all connected components of untracked cells in the second mesh.
If a connected component is not at the boundary the mothod
resolve_division_event_for_connected_component is called to attempt to resolve
the division.
"""
# for frame_one_id in self.largest_mappings[0]:
# self.preliminary_mappings[frame_one_id] = self.largest_mappings[0][frame_one_id]
# self.preliminary_mappings = copy.copy(self.largest_mappings[0])
# first, identify any cells that are in network two but are not mapped
network_two = self.mesh_two.generate_network_of_unidentified_elements(self.preliminary_mappings.values())
connected_components_in_network_two = list( nx.connected_component_subgraphs(network_two) )
for connected_component in connected_components_in_network_two:
#check whether component is at mesh boundary:
component_is_on_boundary = False
for node in connected_component:
if self.mesh_two.get_element_with_frame_id(node).check_if_on_boundary():
component_is_on_boundary = True
break
if not component_is_on_boundary:
self.resolve_division_event_for_connected_component(connected_component)
# self.reindex_global_ids()
# then, get all their neighbouring cells, and all inverse images of neighbouring cells
# make a connected component out of both
# remove both from preliminary mappings
# identify division event on both connected components
def resolve_division_event_for_connected_component(self, connected_component):
"""This method will extend the connected component in network two by all it's
first-order adjacent elements. It will then find the corresponding tracked elements
to these adjacent elements in the first mesh. It will then construct a connected
component of the corresponding elements in the first mesh and subsequently
add any of their shared neighbours.
Finally, it will remove all tracked cells in the first connected component from the
preliminary mapping and pass both connected components to the method identify_division.
If identify_division fails a warning is given the preliminary mapping is returned to it's
oroginal state. This means that the preliminar mapping remains unaltered if division resolution
fails.
Parameters
----------
connected_component : list of ints
list of frame ids of elements in network two that form a connected component.
"""
# collect_cells_for_connected_component_two
adjacent_elements = []
for node in connected_component:
this_element = self.mesh_two.get_element_with_frame_id(node)
if not this_element.check_if_on_boundary():
adjacent_elements += this_element.get_ids_of_adjacent_elements()
else:
print 'element to remove is on boundary'
unique_adjacent_elements = np.unique(np.array(adjacent_elements))
preliminary_adjacent_elements = list(set(unique_adjacent_elements).intersection( self.preliminary_mappings.values() ))
mcs_adjacent_elements = list(set(unique_adjacent_elements).intersection( self.largest_mappings[0].values() ))
# collect cells for connected_component_one
inverse_preliminary_mapping = { value : key for key, value in self.preliminary_mappings.items() }
inverse_largest_mapping = { value : key for key, value in self.largest_mappings[0].items() }
inverse_images_of_preliminary_adjacent_elements = [ inverse_preliminary_mapping[frame_id] for
frame_id in preliminary_adjacent_elements]
inverse_images_of_mcs_adjacent_elements = [ inverse_largest_mapping[frame_id] for
frame_id in mcs_adjacent_elements]
unmapped_elements_belonging_to_connected_component_in_network_one = []
for element_id in inverse_images_of_preliminary_adjacent_elements:
unmapped_elements_belonging_to_connected_component_in_network_one += self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([element_id])
for element_id in inverse_images_of_mcs_adjacent_elements:
unmapped_elements_belonging_to_connected_component_in_network_one += self.mesh_one.get_not_yet_mapped_shared_neighbour_ids([element_id])
unmapped_elements_belonging_to_connected_component_in_network_one = list(np.unique(np.array(unmapped_elements_belonging_to_connected_component_in_network_one)))
unmapped_elements_belonging_to_connected_component_in_network_one += inverse_images_of_preliminary_adjacent_elements
unmapped_elements_belonging_to_connected_component_in_network_one += inverse_images_of_mcs_adjacent_elements
unmapped_elements_belonging_to_connected_component_in_network_two = [node for node in connected_component] + preliminary_adjacent_elements + mcs_adjacent_elements
# remove the collected cells from the mapping
old_mappings = dict()
for frame_id in unmapped_elements_belonging_to_connected_component_in_network_one:
if frame_id in self.preliminary_mappings:
old_mappings[frame_id] = self.preliminary_mappings[frame_id]
elif frame_id in self.largest_mappings[0]:
old_mappings[frame_id] = self.largest_mappings[0][frame_id]
global_id = self.mesh_one.get_element_with_frame_id(frame_id).global_id
try:
self.mesh_two.get_element_with_global_id(global_id).global_id = None
self.mapped_ids.remove(global_id)
except KeyError:
pass
self.mesh_one.get_element_with_frame_id(frame_id).global_id = None
try:
del( self.preliminary_mappings[frame_id] )
except KeyError:
pass
self.mesh_one.index_global_ids()
self.mesh_two.index_global_ids()
# make the connected components
connected_component_one = self.network_one.subgraph( unmapped_elements_belonging_to_connected_component_in_network_one )
connected_component_two = self.network_one.subgraph( unmapped_elements_belonging_to_connected_component_in_network_two )
# pass to our connected component function
try:
self.identify_division(connected_component_one, connected_component_two)
except:
warnings.warn("could not resolve division event")
for frame_id in old_mappings:
self.preliminary_mappings[frame_id] = old_mappings[frame_id]
def merge_elements(self, element_one, element_two):
"""Merge two elements into a bigger element, taking out the shared nodes.
This function will leave the nodes untouched, i.e. their information about elements will not be updated.
The original elements will also not be affected.
Parameters
----------
element_one : Element instance
first element that we would like to merge
element_two : Element instance
second element that we would like to merge
Returns
-------
merged_element : Element instance
A new element over the existing nodes. Is not part of the element vectors in the nodes.
"""
new_element_nodes = []
for local_index, node in enumerate(element_one.nodes):
if ( element_one.id_in_frame in node.get_adjacent_element_ids() and
element_two.id_in_frame in node.get_adjacent_element_ids() ):
next_node = element_one.nodes[ (local_index + 1)%element_one.get_num_nodes() ]
if ( element_one.id_in_frame in next_node.get_adjacent_element_ids() and
element_two.id_in_frame in next_node.get_adjacent_element_ids() ):
new_element_nodes.append(node)
one_edge_id = node.id
break
else:
previous_node = element_one.nodes[ element_one.get_num_nodes() - 1 ]
new_element_nodes.append(previous_node)
one_edge_id = previous_node.id
else:
new_element_nodes.append(node)
# we find the local index of the found node in the other cell
for local_index, node in enumerate(element_two.nodes):
if node.id == one_edge_id:
second_element_local_index = local_index
break
# loop through the second element nodes
reached_other_side = False
while reached_other_side == False:
second_element_local_index = ( second_element_local_index + 1 )%element_two.get_num_nodes()
next_node = element_two.nodes[second_element_local_index]
if ( element_one.id_in_frame in next_node.get_adjacent_element_ids() and
element_two.id_in_frame in next_node.get_adjacent_element_ids() ):
new_element_nodes.append(next_node)
second_edge_id = next_node.id
reached_other_side = True
else:
new_element_nodes.append(next_node)
# we find the local index of the found node in the other cell
for local_index, node in enumerate(element_one.nodes):
if node.id == second_edge_id:
first_element_local_index = local_index
break
for local_index in range( first_element_local_index + 1, element_one.get_num_nodes() ):
new_element_nodes.append(element_one.nodes[local_index])
# We add the nodes to the element after instantiation, so that the element is not added to the node
merged_element = mesh.Element([])
merged_element.nodes = new_element_nodes
assert( merged_element.calculate_area() > 0 )
return merged_element
def evaluate_tracking(first_mesh, second_mesh, ground_truth):
"""Evaluate the tracking.
Parameters
----------
first_mesh : Mesh instance
this is a mesh that has global ids in it
second_mesh : Mesh instance
another mesh with global ids in it
ground truth : dictionary, keys and values are integers
Keys are frame ids in first_mesh, values are
frame ids in second_mesh
Returns
-------
success_boolean : bool
True if less than four cells in ground_truth are not tracked,
and if all tracked cells correspond to pairings in ground_truth
number_tracked_cells : int
Number of correctly tracked cells between first_mesh and
second_mesh
Warning
-------
This function is not tested!
"""
correctly_tracked_cells = []
incorrectly_tracked_cells = []
missing_cells = []
for first_element in first_mesh.elements:
# and that the mapping coincides with the ground truth for all tracked ids
first_frame_id = first_element.id_in_frame
if first_frame_id in ground_truth:
if first_element.global_id is None:
missing_cells.append(first_frame_id)
else:
this_global_id = first_element.global_id
second_element = second_mesh.get_element_with_global_id(this_global_id)
second_frame_id = second_element.id_in_frame
if second_frame_id == ground_truth[first_frame_id]:
correctly_tracked_cells.append(first_frame_id)
else:
incorrectly_tracked_cells.append(first_frame_id)
success_boolean = ( len(missing_cells) < 4 and len(incorrectly_tracked_cells) == 0 )
number_tracked_cells = len(correctly_tracked_cells)
number_incorrectly_tracked_cells = len(incorrectly_tracked_cells)
return success_boolean, number_tracked_cells, number_incorrectly_tracked_cells
def find_maximum_common_subgraph(mesh_one, mesh_two):
"""Find a mapping between the cell ids in both frames and assigns the global ids according
to their maximum common subgraph.
Writes global_id entries for all identified elements in both meshes.
Parameters
----------
mesh_one : Mesh type
First mesh
mesh_two : Mesh type
Second mesh
Returns
-------
mapped_ids : dict (int->int)
the ids of elements that were identified in both meshes
"""
subgraph_finder = LocalisedSubgraphFinder(mesh_one, mesh_two)
subgraph_finder.find_maximum_common_subgraph()
post_processor = PostProcessor(mesh_one, mesh_two, subgraph_finder.largest_mappings)
post_processor.tidy_current_mapping()
post_processor.index_global_ids_from_largest_mappings()
mesh_two.index_global_ids()
mesh_one.index_global_ids()
return post_processor.mapped_ids
| bsd-3-clause | 994,204,286,584,167,700 | 46.735653 | 170 | 0.60223 | false | 4.291493 | false | false | false |
procool/myvdl | myvdlclass/plugins/mailru.py | 1 | 3857 | import re
import logging
import subprocess
import sys
import json
from urllib import quote_plus
from myvdlclass.plugins.base import Extention
from myvdlclass.lib.curl import CUrl, HTTPErrorEx
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class MailRU(Extention):
enabled=True
ident="mailru"
##re_ident = re.compile("""\<meta name="twitter:player" content="(.*?)"\/\>""")
re_ident = re.compile("""\<meta name=".*?" content="https:\/\/rutube\.ru\/play\/embed\/(\d+)"\/\>""")
cookies_jar_file = "/tmp/myvdl-mailru-cookies.jar"
default_headers = {
#'Host': 'mail.ru',
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.5',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Cache-Control': 'max-age=0',
}
@classmethod
def get_url_re(cls):
return re.compile('^http(s|):\/\/(www\.|my\.|)mail\.ru')
def __init__(self, url, engine, *args, **kwargs):
self.url = url
self.engine = engine
def find_ident(self):
"""
## Get by http://zasasa.com/ru/skachat_video_s_mail.ru.php
##http://my.mail.ru/video/inbox/www.kristina/29/31.html
url = "http://videoapi.my.mail.ru/videos/inbox/www.kristina/29/31.json"
##https://my.mail.ru/v/thisishorosho_tv/video/_groupvideo/769.html
url = "http://videoapi.my.mail.ru/videos/v/thisishorosho_tv/_groupvideo/769.json"
##https://my.mail.ru/list/xakepx/video/199/283.html
url = "http://videoapi.my.mail.ru/videos/list/xakepx/199/283.json"
##https://my.mail.ru/mail/gromow1981/video/_myvideo/1395.html
url = "http://videoapi.my.mail.ru/videos/mail/gromow1981/_myvideo/1395.json"
##https://my.mail.ru/corp/afisha/video/trailers/15375.html
url = "http://videoapi.my.mail.ru/videos/corp/afisha/trailers/15375.json"
"""
url = "http://videoapi.my.mail.ru/videos/"
dt = re.findall("http(?:s|)://my.mail.ru/video/(.*)\.html$", self.url)
if len(dt) > 0:
return url+dt[0]+".json"
dt = re.findall("http(?:s|)://my.mail.ru/(.*)\.html$", self.url)
if len(dt) > 0:
return url+dt[0]+".json"
return None
def start(self):
api_url = self.find_ident()
if api_url is None:
print "MAIL.RU: Unsupported url!"
return None
params = self.curl_get_default_params()
try:
answ = CUrl.download(api_url, 'compressed', **params)
data = json.loads(answ)
#print "DATA", json.dumps(data, indent=4)
except Exception as err:
print "MAIL.RU: Can't load video data, may be wrong url?"
return None
flname = "%s" % re.sub("""[\"\,\.\'\s\t\&\;\$\*]+""", "_", data["meta"]["title"])
hq = 0
url = None
for v in data["videos"]:
hq_ = int(v["key"].replace("p", ""))
if hq_ > hq:
hq = hq_
url = v["url"]
if url is None:
print "MAIL.RU: No video found!"
flext = re.findall("""\/\d+\.(.*?)\?""", url)[0]
flname += ".%s" % flext
print "MAIL.RU: DOWNLOADING:", url
CUrl.download(url, 'globoff', 'compressed', print_status=True, output=flname, **params)
print
print "Saved as: %s" % flname
def curl_get_default_params(self, **kwargs):
params = {
'headers': self.default_headers,
'cookie-jar': self.cookies_jar_file,
'cookie': self.cookies_jar_file,
}
params.update(kwargs)
return params
| bsd-2-clause | 7,366,180,022,331,641,000 | 28.669231 | 105 | 0.552502 | false | 3.171875 | false | false | false |
CiscoSystems/fabric_enabler | dfa/server/services/firewall/native/fw_constants.py | 1 | 6193 | # Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Padmanabhan Krishnan, Cisco Systems, Inc.
#
# Service Constants
import dfa.server.services.constants as services_const
AUTO_NWK_CREATE = True
DEVICE = ''
SCHED_POLICY = 'max_sched'
VLAN_ID_MIN = services_const.VLAN_ID_MIN
VLAN_ID_MAX = services_const.VLAN_ID_MAX
MOB_DOMAIN_NAME = 'md0'
HOST_PROF = 'serviceNetworkUniversalDynamicRoutingESProfile'
HOST_FWD_MODE = 'proxy-gateway'
PART_PROF = 'vrf-common-universal-external-dynamic-ES'
EXT_PROF = 'externalNetworkUniversalDynamicRoutingESProfile'
EXT_FWD_MODE = 'anycast-gateway'
IN_IP_START = '100.100.2.0/24'
IN_IP_END = '100.100.20.0/24'
OUT_IP_START = '200.200.2.0/24'
OUT_IP_END = '200.200.20.0/24'
DUMMY_IP_SUBNET = '9.9.9.0/24'
IN_SERVICE_SUBNET = 'FwServiceInSub'
IN_SERVICE_NWK = 'FwServiceInNwk'
SERV_PART_NAME = 'CTX-ext'
OUT_SERVICE_SUBNET = 'FwServiceOutSub'
OUT_SERVICE_NWK = 'FwServiceOutNwk'
DUMMY_SERVICE_RTR = 'DUMMY_SRVC_RTR'
DUMMY_SERVICE_NWK = 'DUMMY_SRVC_NWK'
TENANT_EDGE_RTR = 'Cisco_TenantEdge'
FW_TENANT_EDGE = 'TE'
FW_CR_OP = 'CREATE'
FW_DEL_OP = 'DELETE'
RESULT_FW_CREATE_INIT = 'FAB_CREATE_PEND'
RESULT_FW_CREATE_DONE = 'FAB_CREATE_DONE'
RESULT_FW_DELETE_INIT = 'FAB_DELETE_PEND'
RESULT_FW_DELETE_DONE = 'FAB_DELETE_DONE'
FW_CONST = 'Firewall'
INIT_STATE_STR = 'INIT'
OS_IN_NETWORK_CREATE_FAIL = 'OS_IN_NETWORK_CREATE_FAIL'
OS_INIT_STATE = OS_IN_NETWORK_CREATE_FAIL
OS_IN_NETWORK_CREATE_SUCCESS = 'OS_IN_NETWORK_CREATE_SUCCESS'
OS_OUT_NETWORK_CREATE_FAIL = 'OS_OUT_NETWORK_CREATE_FAIL'
OS_OUT_NETWORK_CREATE_SUCCESS = 'OS_OUT_NETWORK_CREATE_SUCCESS'
OS_DUMMY_RTR_CREATE_FAIL = 'OS_DUMMY_RTR_CREATE_FAIL'
OS_DUMMY_RTR_CREATE_SUCCESS = 'OS_DUMMY_RTR_CREATE_SUCCESS'
OS_CREATE_SUCCESS = OS_DUMMY_RTR_CREATE_SUCCESS
DCNM_IN_NETWORK_CREATE_FAIL = 'DCNM_IN_NETWORK_CREATE_FAIL'
DCNM_INIT_STATE = DCNM_IN_NETWORK_CREATE_FAIL
DCNM_IN_NETWORK_CREATE_SUCCESS = 'DCNM_IN_NETWORK_CREATE_SUCCESS'
DCNM_IN_PART_UPDATE_FAIL = 'DCNM_IN_PART_UPDATE_FAIL'
DCNM_IN_PART_UPDATE_SUCCESS = 'DCNM_IN_PART_UPDATE_SUCCESS'
DCNM_OUT_PART_CREATE_FAIL = 'DCNM_OUT_PART_CREATE_FAIL'
DCNM_OUT_PART_CREATE_SUCCESS = 'DCNM_OUT_PART_CREATE_SUCCESS'
DCNM_OUT_NETWORK_CREATE_FAIL = 'DCNM_OUT_NETWORK_CREATE_FAIL'
DCNM_OUT_NETWORK_CREATE_SUCCESS = 'DCNM_OUT_NETWORK_CREATE_SUCCESS'
DCNM_OUT_PART_UPDATE_FAIL = 'DCNM_OUT_PART_UPDATE_FAIL'
DCNM_OUT_PART_UPDATE_SUCCESS = 'DCNM_OUT_PART_UPDATE_SUCCESS'
DCNM_CREATE_SUCCESS = DCNM_OUT_PART_UPDATE_SUCCESS
# FABRIC_PREPARE_SUCCESS = DCNM_OUT_PART_UPDATE_SUCCESS
FABRIC_PREPARE_SUCCESS = 'FABRIC_PREPARE_SUCCESS'
OS_IN_NETWORK_DEL_FAIL = 'OS_IN_NETWORK_DEL_FAIL'
OS_IN_NETWORK_DEL_SUCCESS = 'OS_IN_NETWORK_DEL_SUCCESS'
OS_OUT_NETWORK_DEL_FAIL = 'OS_OUT_NETWORK_DEL_FAIL'
OS_OUT_NETWORK_DEL_SUCCESS = 'OS_OUT_NETWORK_DEL_SUCCESS'
OS_DUMMY_RTR_DEL_FAIL = 'OS_DUMMY_RTR_DEL_FAIL'
OS_DUMMY_RTR_DEL_SUCCESS = 'OS_DUMMY_RTR_DEL_SUCCESS'
OS_DEL_SUCCESS = 'OS_DUMMY_RTR_DEL_SUCCESS'
DCNM_IN_NETWORK_DEL_FAIL = 'DCNM_IN_NETWORK_DEL_FAIL'
DCNM_IN_NETWORK_DEL_SUCCESS = 'DCNM_IN_NETWORK_DEL_SUCCESS'
DCNM_IN_PART_UPDDEL_FAIL = 'DCNM_IN_PART_UPDDEL_FAIL'
DCNM_IN_PART_UPDDEL_SUCCESS = 'DCNM_IN_PART_UPDDEL_SUCCESS'
DCNM_OUT_PART_DEL_FAIL = 'DCNM_OUT_PART_DEL_FAIL'
DCNM_OUT_PART_DEL_SUCCESS = 'DCNM_OUT_PART_DEL_SUCCESS'
DCNM_OUT_NETWORK_DEL_FAIL = 'DCNM_OUT_NETWORK_DEL_FAIL'
DCNM_OUT_NETWORK_DEL_SUCCESS = 'DCNM_OUT_NETWORK_DEL_SUCCESS'
DCNM_OUT_PART_UPDDEL_FAIL = 'DCNM_OUT_PART_UPDDEL_FAIL'
DCNM_OUT_PART_UPDDEL_SUCCESS = 'DCNM_OUT_PART_UPDDEL_SUCCESS'
DCNM_DELETE_SUCCESS = DCNM_IN_NETWORK_DEL_SUCCESS
INIT = 0
MAX_STATE = FABRIC_PREPARE_SUCCESS # 17
INIT_STATE = 100
OS_IN_NETWORK_STATE = INIT_STATE + 1
OS_OUT_NETWORK_STATE = OS_IN_NETWORK_STATE + 1
OS_DUMMY_RTR_STATE = OS_OUT_NETWORK_STATE + 1
OS_COMPL_STATE = OS_DUMMY_RTR_STATE
DCNM_IN_NETWORK_STATE = OS_DUMMY_RTR_STATE + 1
DCNM_IN_PART_UPDATE_STATE = DCNM_IN_NETWORK_STATE + 1
DCNM_OUT_PART_STATE = DCNM_IN_PART_UPDATE_STATE + 1
DCNM_OUT_NETWORK_STATE = DCNM_OUT_PART_STATE + 1
DCNM_OUT_PART_UPDATE_STATE = DCNM_OUT_NETWORK_STATE + 1
FABRIC_PREPARE_DONE_STATE = DCNM_OUT_PART_UPDATE_STATE + 1
# The below is for debug display
fw_state_fn_dict = {}
fw_state_fn_dict[INIT_STATE] = 'INIT_STATE'
fw_state_fn_dict[OS_IN_NETWORK_STATE] = 'OS_IN_NETWORK_CREATE_STATE'
fw_state_fn_dict[OS_OUT_NETWORK_STATE] = 'OS_OUT_NETWORK_CREATE_STATE'
fw_state_fn_dict[OS_DUMMY_RTR_STATE] = 'OS_DUMMY_RTR_CREATE_STATE'
fw_state_fn_dict[DCNM_IN_NETWORK_STATE] = 'DCNM_IN_NETWORK_CREATE_STATE'
fw_state_fn_dict[DCNM_IN_PART_UPDATE_STATE] = 'DCNM_IN_PART_UPDATE_STATE'
fw_state_fn_dict[DCNM_OUT_PART_STATE] = 'DCNM_OUT_PART_CREATE_STATE'
fw_state_fn_dict[DCNM_OUT_NETWORK_STATE] = 'DCNM_OUT_NETWORK_CREATE_STATE'
fw_state_fn_dict[DCNM_OUT_PART_UPDATE_STATE] = 'DCNM_OUT_PART_UPDATE_STATE'
fw_state_fn_dict[FABRIC_PREPARE_DONE_STATE] = 'FABRIC_PREPARE_DONE_STATE'
fw_state_fn_del_dict = {}
fw_state_fn_del_dict[INIT_STATE] = 'INIT_STATE'
fw_state_fn_del_dict[OS_IN_NETWORK_STATE] = 'OS_IN_NETWORK_DELETE_STATE'
fw_state_fn_del_dict[OS_OUT_NETWORK_STATE] = 'OS_OUT_NETWORK_DELETE_STATE'
fw_state_fn_del_dict[OS_DUMMY_RTR_STATE] = 'OS_DUMMY_RTR_DELETE_STATE'
fw_state_fn_del_dict[DCNM_IN_NETWORK_STATE] = 'DCNM_IN_NETWORK_DELETE_STATE'
fw_state_fn_del_dict[DCNM_IN_PART_UPDATE_STATE] = 'DCNM_IN_PART_UPDDEL_STATE'
fw_state_fn_del_dict[DCNM_OUT_PART_STATE] = 'DCNM_OUT_PART_DELETE_STATE'
fw_state_fn_del_dict[DCNM_OUT_NETWORK_STATE] = 'DCNM_OUT_NETWORK_DELETE_STATE'
fw_state_fn_del_dict[DCNM_OUT_PART_UPDATE_STATE] = 'DCNM_OUT_PART_UPDDEL_STATE'
fw_state_fn_del_dict[FABRIC_PREPARE_DONE_STATE] = 'FABRIC_PREPARE_DONE_STATE'
| apache-2.0 | 6,778,517,398,603,200,000 | 43.553957 | 79 | 0.743743 | false | 2.480176 | false | false | false |
angea/corkami | src/angecryption/rmll/angecryption/angecrypt.py | 1 | 1936 | #mini-AngeCryption
#Ange Albertini 2014, BSD Licence - with the help of Jean-Philippe Aumasson
import struct
import sys
import binascii
source_file, target_file, result_file, key = "logo11w.png", "duckduckgo.png", "angecrypted.png", "AngeCryptionKey!"
from Crypto.Cipher import AES
BS = 16
pad = lambda s: s if (len(s) % 16) == 0 else s + (16 - len(s) % 16) * "\0"
with open(source_file, "rb") as f:
s = pad(f.read())
with open(target_file, "rb") as f:
t = pad(f.read())
p = s[:BS] # our first plaintext block
ecb_dec = AES.new(key, AES.MODE_ECB)
assert BS >= 16
size = len(s) - BS
# our dummy chunk type
# 4 letters, first letter should be lowercase to be ignored
chunktype = 'rmll'
# PNG signature, chunk size, our dummy chunk type
c = PNGSIG = '\x89PNG\r\n\x1a\n' + struct.pack(">I",size) + chunktype
c = ecb_dec.decrypt(c)
IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)])
cbc_enc = AES.new(key, AES.MODE_CBC, IV)
result = cbc_enc.encrypt(s)
#write the CRC of the remaining of s at the end of our dummy block
result = result + struct.pack(">I", binascii.crc32(result[12:]) % 0x100000000)
#and append the actual data of t, skipping the sig
result = result + t[8:]
#we have our result, key and IV
#generate the result file
cbc_dec = AES.new(key, AES.MODE_CBC, IV)
with open(result_file, "wb") as f:
f.write(cbc_dec.decrypt(pad(result)))
print " ".join("%02X" % ord(i) for i in IV)
#generate the script
with open("crypt.py", "wb") as f:
f.write("""from Crypto.Cipher import %(AES)s
AES = %(AES)s.new(%(key)s, %(AES)s.MODE_CBC, %(IV)s)
with open(%(source)s, "rb") as f:
d = f.read()
d = AES.encrypt(d)
with open("encrypted.png", "wb") as f:
f.write(d)""" % {
'AES': AES.__name__.split(".")[-1],
'key':`key`,
'IV':`IV`,
'source':`result_file`,
'target':`target_file`}
) | bsd-2-clause | 7,468,474,675,663,712,000 | 24.547945 | 115 | 0.607438 | false | 2.674033 | false | false | false |
PaulSonOfLars/telegram-FinanceBot | modules/notes.py | 2 | 2056 | #!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
""" This is the note module, taking care of all note related functions.
Note data is found in data/notes.json.
"""
from telegram.ext import CommandHandler
from modules import helper, strings
def save_note(bot, update, args):
notes = helper.loadjson(loc_notesjson)
chat_id = str(update.message.chat_id)
try:
notes[chat_id]
except KeyError:
notes[chat_id] = {}
if len(args) >= 2:
# add note to note repo
notename = args[0]
del args[0]
note_data = " ".join(args)
notes[chat_id][notename] = note_data
print("Added new note \"" + notename + "\" with content \"" + note_data + "\".")
else:
update.message.reply_text(strings.errBadFormat)
helper.dumpjson(loc_notesjson, notes)
def get_note(bot, update, args):
notes = helper.loadjson(loc_notesjson)
chat_id = str(update.message.chat_id)
try:
notes[chat_id]
except KeyError:
notes[chat_id] = {}
if len(args) == 1:
msg = ""
try:
msg = notes[chat_id][args[0]]
except KeyError:
msg = errNoNoteFound + args[0]
update.message.reply_text(msg)
else:
update.message.reply_text(strings.errBadFormat)
def all_notes(bot, update, args):
notes = helper.loadjson(loc_notesjson)
chat_id = str(update.message.chat_id)
try:
notes[chat_id]
except KeyError:
notes[chat_id] = {}
msg = "No notes in this chat."
if len(notes[chat_id]) > 0:
msg = msgNotesForChat
for note in notes[chat_id]:
msg += "\n" + note
update.message.reply_text(msg)
save_handler = CommandHandler("save", save_note, pass_args=True)
get_handler = CommandHandler("get", get_note, pass_args=True)
note_handler = CommandHandler("note", all_notes, pass_args=True)
loc_notesjson = "./data/notes.json"
msgNotesForChat = "These are the notes i have saved for this chat: \n"
errNoNoteFound = "No note found by the name of "
| apache-2.0 | -145,706,918,541,019,170 | 24.073171 | 88 | 0.612354 | false | 3.337662 | false | false | false |
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_07_01/aio/_configuration.py | 1 | 3218 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ResourceManagementClientConfiguration(Configuration):
"""Configuration for ResourceManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(ResourceManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2019-07-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| mit | 1,807,116,016,493,808,600 | 47.029851 | 134 | 0.682101 | false | 4.558074 | true | false | false |
beatorizu/tekton | backend/appengine/routes/temas/rest.py | 1 | 1610 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from distutils import log
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
from tekton.gae.middleware.json_middleware import JsonUnsecureResponse, JsonResponse
from tema.tema_model import TemaForm, Tema
__author__ = 'Bea'
@login_not_required
@no_csrf
def index():
query = Tema.query_ordenada_por_titulo()
temas = query.fetch()
for tema in temas:
key = tema.key
key_id = key.id()
form = TemaForm()
temas = [form.fill_with_model(t) for t in temas]
return JsonResponse(temas)
@login_not_required
@no_csrf
def salvar(_resp, **propriedades):
form = TemaForm(**propriedades)
erros = form.validate()
if erros:
_resp.set_status(400)
return JsonUnsecureResponse(erros)
tema = form.fill_model()
tema.put()
dct = form.fill_with_model(tema)
log.info(dct)
return JsonUnsecureResponse(dct)
@login_not_required
@no_csrf
def editar(_resp, **propriedades):
form = TemaForm(**propriedades)
erros = form.validate()
if erros:
_resp.set_status(400)
return JsonUnsecureResponse(erros)
tema = ndb.Key(Tema, int(propriedades['tema_id'])).get()
tema.titulo = propriedades['titulo']
tema.descricao = propriedades['descricao']
tema.put()
dct = form.fill_with_model(tema)
log.info(dct)
return JsonUnsecureResponse(dct)
@login_not_required
@no_csrf
def deletar(tema_id):
key = ndb.Key(Tema, int(tema_id))
key.delete() | mit | 6,514,255,939,189,863,000 | 26.305085 | 84 | 0.681988 | false | 2.959559 | false | false | false |
briot/geneapro | backend/geneaprove/migrations/0003_initial_data.py | 1 | 10022 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
def forward(apps, schema_editor):
Surety_Scheme = apps.get_model('geneaprove', 'Surety_Scheme')
SSPart = apps.get_model('geneaprove', 'Surety_Scheme_Part')
PPart = apps.get_model('geneaprove', 'Place_Part_Type')
RType = apps.get_model('geneaprove', 'Repository_Type')
EType = apps.get_model('geneaprove', 'Event_Type')
ETRole = apps.get_model('geneaprove', 'Event_Type_Role')
CPT = apps.get_model('geneaprove', 'Characteristic_Part_Type')
CIT = apps.get_model('geneaprove', 'Citation_Part_Type')
GT = apps.get_model('geneaprove', 'Group_Type')
db_alias = schema_editor.connection.alias
s = Surety_Scheme(
description="This is the default surety scheme, based on five levels of certainty",
name="Default scheme")
s.save()
SSPart.objects.using(db_alias).bulk_create([
SSPart(name="very high", scheme=s, sequence_number=5),
SSPart(name="high", scheme=s, sequence_number=4),
SSPart(name="normal", scheme=s, sequence_number=3),
SSPart(name="low", scheme=s, sequence_number=2),
SSPart(name="very low", scheme=s, sequence_number=1)])
PPart.objects.using(db_alias).bulk_create([
PPart(gedcom="ADR1", name="address"),
PPart(gedcom="ADR2", name="address2"),
PPart(gedcom="CITY", name="city"),
PPart(gedcom="CTRY", name="country"),
PPart(gedcom="", name="county"),
PPart(gedcom="MAP", name="GPS coordinates"),
PPart(gedcom="", name="monument"),
PPart(gedcom="", name="province"),
PPart(gedcom="STAE", name="state"),
PPart(gedcom="POST", name="zipcode"),
PPart(gedcom="WWW", name="website"),
PPart(gedcom="EMAIL", name="email"),
PPart(gedcom="FAX", name="fax"),
PPart(gedcom="PHON", name="phone"),
PPart(gedcom="WEB", name="website"),
PPart(gedcom="NOTE", name="note"),
PPart(gedcom="FORM", name="place hierarchy"),
])
CIT.objects.using(db_alias).bulk_create([
CIT(gedcom='TITL', name='title'),
CIT(gedcom='CHAN', name='last change'),
CIT(gedcom='DATE', name='date'),
CIT(gedcom='PAGE', name='page'),
CIT(gedcom='QUAY', name='quality'),
CIT(gedcom='TEXT', name='text'),
CIT(gedcom='AUTH', name='author'),
CIT(gedcom='PUBL', name='publisher')])
RType.objects.using(db_alias).bulk_create([
RType(description="", name="album"),
RType(description="", name="archive"),
RType(description="", name="bookstore"),
RType(description="", name="cemetery"),
RType(description="", name="church"),
RType(description="", name="collection"),
RType(description="", name="library"),
RType(description="", name="web site")])
EType.objects.using(db_alias).bulk_create([
EType(gedcom="", name="acquittal"),
EType(gedcom="ADOP", name="adoption"),
EType(gedcom="CHRA", name="adult christening"),
EType(gedcom="ANUL", name="annulment"),
EType(gedcom="", name="arrest"),
EType(gedcom="BAPM", name="baptism"),
EType(gedcom="BARM", name="bar mitzvah"),
EType(gedcom="BASM", name="bas mitzvah"),
EType(gedcom="BIRT", name="birth"),
EType(gedcom="BLES", name="blessing"),
EType(gedcom="BURI", name="burial"),
EType(gedcom="CENS", name="census"),
EType(gedcom="CHR", name="christening"),
EType(gedcom="", name="civil union"),
EType(gedcom="CONF", name="confirmation"),
EType(gedcom="", name="conviction"),
EType(gedcom="CREM", name="cremation"),
EType(gedcom="DEAT", name="death"),
EType(gedcom="DIV", name="divorce"),
EType(gedcom="DIVF", name="divorce filed"),
EType(gedcom="EMIG", name="emigration"),
EType(gedcom="ENGA", name="engagement"),
EType(gedcom="FCOM", name="first communion"),
EType(gedcom="GRAD", name="graduation"),
EType(gedcom="IMMI", name="immigration"),
EType(gedcom="", name="indictement"),
EType(gedcom="MARB", name="marriage bans"),
EType(gedcom="MARR", name="marriage"),
EType(gedcom="MARC", name="marriage contract"),
EType(gedcom="MARL", name="marriage license"),
EType(gedcom="MARS", name="marriage settlement"),
EType(gedcom="_MIL", name="military service"),
EType(gedcom="EDUC", name="education"),
EType(gedcom="_DEG", name="diploma"),
EType(gedcom="NATU", name="naturalization"),
EType(gedcom="ORDN", name="ordination"),
EType(gedcom="EVEN", name="other event"),
EType(gedcom="PROB", name="probate"),
EType(gedcom="", name="religious conversion"),
EType(gedcom="RESI", name="residence"),
EType(gedcom="RETI", name="retirement"),
EType(gedcom="", name="voyage"),
EType(gedcom="WILL", name="will")])
birth = EType.objects.get(gedcom="BIRT")
adoption = EType.objects.get(gedcom="ADOP")
ETRole.objects.using(db_alias).bulk_create([
ETRole(name="principal", type=None),
ETRole(name="father", type=birth),
ETRole(name="mother", type=birth),
ETRole(name="adopting", type=adoption),
ETRole(name="not adopting", type=adoption),
])
CPT.objects.using(db_alias).bulk_create([
CPT(gedcom="", is_name_part=False, name="address"),
CPT(gedcom="NOTE", is_name_part=False, name="note"),
CPT(gedcom="FACT", is_name_part=False, name="other"),
CPT(gedcom="_IMG", is_name_part=False, name="image"),
CPT(gedcom="OCCU", is_name_part=False, name="occupation"),
CPT(gedcom="", is_name_part=False, name="AFN"),
CPT(gedcom="", is_name_part=False, name="cause of death"),
CPT(gedcom="CAST", is_name_part=False, name="cast name"),
CPT(gedcom="PROP", is_name_part=False, name="property (real-estate,...)"),
CPT(gedcom="", is_name_part=False, name="email"),
CPT(gedcom="", is_name_part=False, name="ethnicity"),
CPT(gedcom="", is_name_part=False, name="language"),
CPT(gedcom="", is_name_part=False, name="literacy"),
CPT(gedcom="", is_name_part=False, name="living"),
CPT(gedcom="", is_name_part=False, name="marital status"),
CPT(gedcom="", is_name_part=False, name="medical condition"),
CPT(gedcom="", is_name_part=False, name="nationality"),
CPT(gedcom="NCHI", is_name_part=False, name="number of children"),
CPT(gedcom="NMR", is_name_part=False, name="number of marriages"),
CPT(gedcom="", is_name_part=False, name="patronymic"),
CPT(gedcom="", is_name_part=False, name="personality"),
CPT(gedcom="DSCR", is_name_part=False, name="physical description"),
CPT(gedcom="RELI", is_name_part=False, name="religion"),
CPT(gedcom="IDNO", is_name_part=False, name="national identification number"),
CPT(gedcom="NATI", is_name_part=False, name="national or tribe origin"),
CPT(gedcom="RFN", is_name_part=False, name="record file number"),
CPT(gedcom="AFN", is_name_part=False, name="ancestral file number"),
CPT(gedcom="RIN", is_name_part=False, name="RIN"),
CPT(gedcom="SEX", is_name_part=False, name="sex"),
CPT(gedcom="TYPE", is_name_part=False, name="type"),
CPT(gedcom="SSN", is_name_part=False,
name="social security number"),
CPT(gedcom="", is_name_part=False, name="telephone"),
CPT(gedcom="TITL", is_name_part=False, name="title"),
CPT(gedcom="REFN", is_name_part=False, name="reference number"),
CPT(gedcom="", is_name_part=True, name="dit name"),
CPT(gedcom="", is_name_part=True, name="farm name"),
CPT(gedcom="", is_name_part=True, name="matronymic name"),
CPT(gedcom="", is_name_part=True, name="mononame"),
CPT(gedcom="SURN", is_name_part=True, name="surname"),
CPT(gedcom="GIVN", is_name_part=True, name="given name"),
CPT(gedcom="_MIDL", is_name_part=True, name="middle name"),
CPT(gedcom="NPFX", is_name_part=True, name="name prefix"),
CPT(gedcom="NICK", is_name_part=True, name="nickname"),
CPT(gedcom="SPFX", is_name_part=True, name="surname prefix"),
CPT(gedcom="NSFX", is_name_part=True, name="name suffix"),
CPT(gedcom="", is_name_part=True, name="religious name")])
GT.objects.using(db_alias).bulk_create([
GT(gedcom="", name="association"),
GT(gedcom="", name="caste"),
GT(gedcom="", name="children of union"),
GT(gedcom="", name="friends"),
GT(gedcom="", name="neighbors"),
GT(gedcom="", name="passenger list"),
GT(gedcom="", name="passengers"),
GT(gedcom="", name="same person")])
class Migration(migrations.Migration):
dependencies = [
('geneaprove', '0002_auto_20180314_0957')
]
operations = [
migrations.RunPython(forward)
]
| gpl-2.0 | -5,780,604,873,144,046,000 | 50.659794 | 92 | 0.550489 | false | 3.260247 | false | false | false |
southpawtech/TACTIC-DEV | src/pyasm/biz/file.py | 1 | 34859 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ["FileException", "File", "FileAccess", "IconCreator", "FileGroup", "FileRange"]
from pyasm.common import Common, Xml, TacticException, Environment, System, Config
from pyasm.search import *
from project import Project
from subprocess import Popen, PIPE
import sys, os, string, re, stat, glob
try:
#import Image
from PIL import Image
# Test to see if imaging actually works
import _imaging
HAS_PIL = True
except:
HAS_PIL = False
if not HAS_PIL:
    # fall back to the standalone PIL package without clobbering a
    # successful Pillow import above
    try:
        import Image
        # Test to see if imaging actually works
        import _imaging
        HAS_PIL = True
    except:
        HAS_PIL = False
# check if imagemagick is installed, and find exe if possible
convert_exe = ''
HAS_IMAGE_MAGICK = False
if os.name == "nt":
# prefer direct exe to not confuse with other convert.exe present on nt systems
convert_exe_list = glob.glob('C:\\Program Files\\ImageMagick*')
for exe in convert_exe_list:
try:
convert_process = Popen(['%s\\convert.exe'%exe,'-version'], stdout=PIPE, stderr=PIPE)
convert_return,convert_err = convert_process.communicate()
if 'ImageMagick' in convert_return:
convert_exe = '%s\\convert.exe'%exe
HAS_IMAGE_MAGICK = True
except:
print "Running %s failed" %exe
if not convert_exe_list:
# IM might not be in Program Files but may still be in PATH
try:
convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE)
convert_return,convert_err = convert_process.communicate()
if 'ImageMagick' in convert_return:
convert_exe = 'convert'
HAS_IMAGE_MAGICK = True
except:
pass
else:
# in other systems (e.g. unix) 'convert' is expected to be in PATH
try:
convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE)
convert_return,convert_err = convert_process.communicate()
if 'ImageMagick' in convert_return:
convert_exe = 'convert'
HAS_IMAGE_MAGICK = True
except:
pass
if Common.which("ffprobe"):
HAS_FFMPEG = True
else:
HAS_FFMPEG = False
import subprocess
class FileException(TacticException):
pass
class File(SObject):
NORMAL_EXT = ['max','ma','xls' ,'xlsx', 'doc', 'docx','txt', 'rtf', 'odt','fla','psd', 'xsi', 'scn', 'hip', 'xml','eani','pdf', 'fbx',
'gz', 'zip', 'rar',
'ini', 'db', 'py', 'pyd', 'spt'
]
VIDEO_EXT = ['mov','wmv','mpg','mpeg','m1v','m2v','mp2','mp4','mpa','mpe','mp4','wma','asf','asx','avi','wax',
'wm','wvx','ogg','webm','mkv','m4v','mxf','f4v','rmvb']
IMAGE_EXT = ['jpg','png','tif','tiff','gif','dds','dcm']
SEARCH_TYPE = "sthpw/file"
BASE_TYPE_SEQ = "sequence"
BASE_TYPE_DIR = "directory"
BASE_TYPE_FILE = "file"
def get_code(my):
return my.get_value("code")
def get_file_name(my):
return my.get_value("file_name")
def get_file_range(my):
return my.get_value("file_range")
def get_type(my):
return my.get_value("type")
def get_media_type_by_path(cls, path):
tmp, ext = os.path.splitext(path)
ext = ext.lstrip(".")
ext = ext.lower()
if ext in File.VIDEO_EXT:
return "video"
elif ext in File.NORMAL_EXT:
return "document"
else:
return "image"
get_media_type_by_path = classmethod(get_media_type_by_path)
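    # Illustrative sketch (example file names are assumed): classification is
    # driven purely by the extension, and unknown extensions fall through to
    # "image".
    #   File.get_media_type_by_path("shot010.mov")  -> "video"
    #   File.get_media_type_by_path("notes.pdf")    -> "document"
    #   File.get_media_type_by_path("frame.jpg")    -> "image"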
def get_sobject(my):
'''get the sobject associated with this file'''
search = Search(my.get_value("search_type"))
search.add_id_filter(my.get_value("search_id"))
sobject = search.get_sobject()
return sobject
def get_full_file_name(my):
'''Gets the full file name. This is the same as get_file_name'''
return my.get_file_name()
def get_lib_dir(my,snapshot=None):
'''go through the stored snapshot_code to get the actual path'''
code = my.get_value("snapshot_code")
from snapshot import Snapshot
snapshot = Snapshot.get_by_code(code)
return snapshot.get_lib_dir()
def get_env_dir(my,snapshot=None):
'''go through the stored snapshot_code to get the actual path'''
code = my.get_value("snapshot_code")
from snapshot import Snapshot
snapshot = Snapshot.get_by_code(code)
return snapshot.get_env_dir()
def get_web_dir(my,snapshot=None):
'''go through the stored snapshot_code to get the actual path'''
code = my.get_value("snapshot_code")
from snapshot import Snapshot
snapshot = Snapshot.get_by_code(code)
return snapshot.get_web_dir()
def get_lib_path(my):
filename = my.get_full_file_name()
return "%s/%s" % (my.get_lib_dir(), filename)
def get_env_path(my):
'''path beginning with $TACTIC_ASSET_DIR'''
filename = my.get_full_file_name()
return "%s/%s" % (my.get_env_dir(), filename)
def get_web_path(my):
filename = my.get_full_file_name()
return "%s/%s" % (my.get_web_dir(), filename)
##################
# Static Methods
##################
"""
    # DEPRECATED
PADDING = 10
    # DEPRECATED
def add_file_code(file_path, file_code):
ext = ".".join( File.get_extensions(file_path) )
padded_id = str(file_code).zfill(File.PADDING)
file_path = file_path.replace(".%s" % ext, "_%s.%s" % (padded_id, ext) )
return file_path
add_file_code = staticmethod(add_file_code)
    # DEPRECATED
def remove_file_code(file_path):
new_path = re.compile(r'_(\w{%s})\.' % File.PADDING).sub(".", file_path)
return new_path
remove_file_code = staticmethod(remove_file_code)
    # DEPRECATED
def extract_file_code(file_path):
p = re.compile(r'_(\w{%s})\.' % File.PADDING)
m = p.search(file_path)
if not m:
return 0
groups = m.groups()
if not groups:
return 0
else:
file_code = groups[0]
            # make sure there are only alphanumeric characters
if file_code.find("_") != -1:
return 0
# make sure the first 3 are numeric
if not re.match('^\d{3}\w+$', file_code):
return 0
# strip out the leading zeros
return file_code.lstrip("0")
extract_file_code = staticmethod(extract_file_code)
    # DEPRECATED
def extract_file_path(file_path):
'''return file path without the unique id'''
p = re.compile(r'_(\w{%s})\.' % File.PADDING)
m = p.search(file_path)
if not m:
return file_path
groups = m.groups()
if not groups:
return file_path
else:
new_path = file_path.replace("_%s" % groups[0], "")
return new_path
extract_file_path = staticmethod(extract_file_path)
    # DEPRECATED
def has_file_code(file_path):
file_code = File.extract_file_code(file_path)
if file_code == 0:
return False
else:
return True
has_file_code = staticmethod(has_file_code)
"""
def get_extension(file_path):
'''get only the final extension'''
parts = os.path.basename(file_path).split(".")
ext = parts[len(parts)-1]
return ext
get_extension = staticmethod(get_extension)
def get_extensions(file_path):
'''get all of the extensions after the first .'''
parts = os.path.basename(file_path).split(".")
ext = parts[1:len(parts)]
return ext
get_extensions = staticmethod(get_extensions)
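    # Illustrative sketch (example file name is assumed): get_extension keeps
    # only the last extension, while get_extensions keeps everything after the
    # first dot.
    #   File.get_extension("beauty.0010.exr")   -> "exr"
    #   File.get_extensions("beauty.0010.exr")  -> ["0010", "exr"]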
def get_by_snapshot(cls, snapshot, file_type=None):
xml = snapshot.get_xml_value("snapshot")
file_codes = xml.get_values("snapshot/file/@file_code")
search = Search( cls.SEARCH_TYPE)
search.add_filters("code", file_codes)
if file_type:
search.add_filter("type", file_type)
return search.get_sobjects()
get_by_snapshot = classmethod(get_by_snapshot)
def get_by_filename(cls, filename, skip_id=None, padding=0):
search = Search(cls.SEARCH_TYPE)
# if this is a file range then convert file name to padding
# FIXME: need some way to know what and where the padding is
if padding:
filename = re.sub("(.*\.)(\d+)", r"\1####", filename)
search.add_filter("file_name", filename)
project_code = Project.get_project_code()
search.add_filter("project_code", project_code)
if skip_id:
search.add_where('id != %s'%skip_id)
return search.get_sobject()
get_by_filename = classmethod(get_by_filename)
def get_by_snapshots(cls, snapshots, file_type=None):
all_file_codes = []
for snapshot in snapshots:
xml = snapshot.get_xml_value("snapshot")
file_codes = xml.get_values("snapshot/file/@file_code")
all_file_codes.extend(file_codes)
search = Search( cls.SEARCH_TYPE)
search.add_filters("code", all_file_codes)
if file_type:
search.add_filter("type", file_type)
files = search.get_sobjects()
# cache these
for file in files:
key = "%s|%s" % (file.get_search_type(),file.get_code())
SObject.cache_sobject(key, file)
return files
get_by_snapshots = classmethod(get_by_snapshots)
# DEPRECATED
"""
def get_by_path(path):
file_code = File.extract_file_code(path)
if file_code == 0:
return None
search = Search(File.SEARCH_TYPE)
search.add_id_filter(file_code)
file = search.get_sobject()
return file
get_by_path = staticmethod(get_by_path)
"""
def get_by_path(path):
asset_dir = Environment.get_asset_dir()
path = path.replace("%s/" % asset_dir, "")
relative_dir = os.path.dirname(path)
file_name = os.path.basename(path)
# NOTE: this does not work with base_dir_alias
search = Search("sthpw/file")
search.add_filter("relative_dir", relative_dir)
search.add_filter("file_name", file_name)
sobject = search.get_sobject()
return sobject
get_by_path = staticmethod(get_by_path)
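    # Illustrative sketch (the asset_dir value is assumed): the lookup strips
    # the configured asset_dir and then matches on relative_dir and file_name.
    #   With asset_dir = "/home/apache/assets", a path such as
    #   "/home/apache/assets/vfx/shot010/plate_v001.jpg" is searched with
    #   relative_dir "vfx/shot010" and file_name "plate_v001.jpg".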
def create( file_path, search_type, search_id, file_type=None, requires_file=True, st_size=None, repo_type=None, search_code = None):
exists = os.path.exists(file_path)
isdir = os.path.isdir(file_path)
if requires_file and not os.path.exists(file_path):
raise FileException("File '%s' does not exist" % file_path)
file_name = os.path.basename(file_path)
file = File(File.SEARCH_TYPE)
file.set_value("file_name", file_name)
file.set_value("search_type", search_type)
if search_code:
file.set_value("search_code", search_code)
# MongoDb
if search_id and isinstance(search_id, int):
file.set_value("search_id", search_id)
if file_type:
file.set_value("type", file_type)
if isdir:
file.set_value("base_type", File.BASE_TYPE_DIR)
else:
file.set_value("base_type", File.BASE_TYPE_FILE)
project = Project.get()
file.set_value("project_code", project.get_code())
if exists:
if isdir:
dir_info = Common.get_dir_info(file_path)
size = dir_info.get("size")
file.set_value("st_size", size)
else:
from stat import ST_SIZE
size = os.stat(file_path)[ST_SIZE]
file.set_value("st_size", size)
elif st_size != None:
file.set_value("st_size", st_size)
if repo_type:
file.set_value("repo_type", repo_type)
file.commit()
return file
create = staticmethod(create)
def makedirs(dir, mode=None):
'''wrapper to mkdirs in case it ever needs to be overridden'''
print "DEPRECATED: use System().makedirs()"
return System().makedirs(dir,mode)
makedirs = staticmethod(makedirs)
def get_filesystem_name(name, strict=True):
'''takes a name and converts it to a name that can be saved in
the filesystem.'''
filename = name
filename = filename.replace("/", "__")
filename = filename.replace("|", "__")
filename = filename.replace(":", "__")
filename = filename.replace("?", "__")
filename = filename.replace("=", "__")
if strict:
filename = filename.replace(" ", "_")
filename_base, ext = os.path.splitext(filename)
ext = string.lower(ext)
filename = "%s%s" % (filename_base, ext)
return filename
get_filesystem_name = staticmethod(get_filesystem_name)
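    # Illustrative sketch (input name is assumed): separators such as "/", "|",
    # ":", "?" and "=" become "__", spaces become "_" in strict mode, and the
    # extension is lowercased.
    #   File.get_filesystem_name("my file:v1.PNG")  -> "my_file__v1.png"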
def process_file_path(file_path):
        '''makes a file path safe for the file system. Only apply this to the basename, or it would remove the : from C:/'''
return Common.get_filesystem_name(file_path)
process_file_path = staticmethod(process_file_path)
def get_md5(path):
'''get md5 checksum'''
py_exec = Config.get_value("services", "python")
if not py_exec:
py_exec = "python"
if isinstance(path, unicode):
path = path.encode('utf-8')
popen = subprocess.Popen([py_exec, '%s/src/bin/get_md5.py'%Environment.get_install_dir(), path], shell=False, stdout=subprocess.PIPE)
popen.wait()
output = ''
value = popen.communicate()
if value:
output = value[0].strip()
if not output:
err = value[1]
print err
return output
get_md5 = staticmethod(get_md5)
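    # Illustrative usage (the path is assumed): the checksum is computed by a
    # separate python process running src/bin/get_md5.py and returned as a
    # string, or '' if the helper fails.
    #   md5 = File.get_md5("/tmp/plate.0001.exr")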
def is_file_group(file_path):
'''returns True if it is a file group'''
return not (file_path.find('#') == -1 and file_path.find('%') == -1)
is_file_group = staticmethod(is_file_group)
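    # Illustrative sketch: any '#' or '%' in the path marks it as a file group.
    #   File.is_file_group("beauty.####.exr")  -> True
    #   File.is_file_group("beauty.0001.exr")  -> False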
class FileAccess(SObject):
SEARCH_TYPE = "sthpw/file_access"
def create(file):
file_code = file.get_code()
file_access = FileAccess(FileAccess.SEARCH_TYPE)
file_access.set_value("file_code", file_code)
security = WebContainer.get_security()
user = security.get_user_name()
file_access.set_value("login", user)
file_access.commit()
return file_access
create = staticmethod(create)
class IconCreator(object):
'''Utility class that creates icons of an image or document in the
same directory as the image'''
def __init__(my, file_path):
my.file_path = file_path
# check if it exists
if not os.path.exists( file_path ):
raise FileException( \
"Error: file [%s] does not exist" % my.file_path )
my.tmp_dir = os.path.dirname(file_path)
my.icon_path = None
my.web_path = None
my.texture_mode = False
my.icon_mode = False
def set_texture_mode(my):
        '''texture mode: down-res the web image to half scale (about 1/4 the pixels)'''
my.texture_mode = True
def set_icon_mode(my):
        '''icon mode: only generate the small icon and skip the web image'''
my.icon_mode = True
def get_icon_path(my):
return my.icon_path
def get_web_path(my):
return my.web_path
def create_icons(my):
my.execute()
def execute(my):
# check file name
file_name = os.path.basename(my.file_path)
ext = File.get_extension(file_name)
type = string.lower(ext)
if type == "pdf":
my._process_pdf( file_name )
elif type in File.NORMAL_EXT:
# skip icon generation for normal or video files
pass
elif type in File.VIDEO_EXT:
try:
my._process_video( file_name )
except IOError, e:
                '''This is an unknown file type. Do nothing and accept it as a
                plain file'''
print "WARNING: ", e.__str__()
Environment.add_warning("Unknown file type", e.__str__())
else:
# assume it is an image
try:
my._process_image( file_name )
except IOError, e:
                '''This is an unknown file type. Do nothing and accept it as a
                plain file'''
print "WARNING: ", e.__str__()
Environment.add_warning("Unknown file type", e.__str__())
def _process_pdf(my, file_name):
base, ext = os.path.splitext(file_name)
icon_file_name = base + "_icon.png"
tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)
if sys.platform == 'darwin':
return
else:
if not Common.which("convert"):
return
try:
my.file_path = my.file_path.encode('utf-8')
import shlex, subprocess
subprocess.call(['convert', '-geometry','80','-raise','2x2','%s[0]'%my.file_path,\
"%s"%tmp_icon_path])
except Exception, e:
print "Error extracting from pdf [%s]" % e
return
# check that it actually got created
if os.path.exists(tmp_icon_path):
my.icon_path = tmp_icon_path
else:
print "Warning: [%s] did not get created from pdf" % tmp_icon_path
def get_web_file_size(my):
from pyasm.prod.biz import ProdSetting
web_file_size = ProdSetting.get_value_by_key('web_file_size')
thumb_size = (640, 480)
if web_file_size:
parts = re.split('[\Wx]+', web_file_size)
thumb_size = (640, 480)
if len(parts) == 2:
try:
thumb_size = (int(parts[0]), int(parts[1]))
except ValueError:
thumb_size = (640, 480)
return thumb_size
def _process_video(my, file_name):
ffmpeg = Common.which("ffmpeg")
if not ffmpeg:
return
thumb_web_size = my.get_web_file_size()
thumb_icon_size = (120, 100)
exts = File.get_extensions(file_name)
base, ext = os.path.splitext(file_name)
icon_file_name = "%s_icon.png" % base
web_file_name = "%s_web.jpg" % base
tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)
tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name)
#cmd = '''"%s" -i "%s" -r 1 -ss 00:00:01 -t 1 -s %sx%s -vframes 1 "%s"''' % (ffmpeg, my.file_path, thumb_web_size[0], thumb_web_size[1], tmp_web_path)
#os.system(cmd)
import subprocess
try:
subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\
"-s","%sx%s"%(thumb_web_size[0], thumb_web_size[1]),"-vframes","1","-f","image2", tmp_web_path])
if os.path.exists(tmp_web_path):
my.web_path = tmp_web_path
else:
my.web_path = None
except Exception, e:
Environment.add_warning("Could not process file", \
"%s - %s" % (my.file_path, e.__str__()))
pass
try:
subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\
"-s","%sx%s"%(thumb_icon_size[0], thumb_icon_size[1]),"-vframes","1","-f","image2", tmp_icon_path])
if os.path.exists(tmp_icon_path):
my.icon_path = tmp_icon_path
else:
my.icon_path = None
except Exception, e:
Environment.add_warning("Could not process file", \
"%s - %s" % (my.file_path, e.__str__()))
pass
def _process_image(my, file_name):
base, ext = os.path.splitext(file_name)
# get all of the extensions
exts = File.get_extensions(file_name)
frame = 0
if len(exts) == 2:
try:
frame = int(exts[0])
base = base.replace(".%s" % exts[0], '' )
except ValueError:
frame = 0
if frame:
icon_file_name = "%s_icon.%s.png" % (base, exts[0])
web_file_name = "%s_web.%s.jpg" % (base, exts[0])
else:
icon_file_name = "%s_icon.png" % base
web_file_name = "%s_web.jpg" % base
tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)
tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name)
# create the web image
try:
if my.texture_mode:
my._resize_texture(my.file_path, tmp_web_path, 0.5)
my.web_path = tmp_web_path
# create the icon
thumb_size = (120,100)
try:
my._resize_image(tmp_web_path, tmp_icon_path, thumb_size)
except TacticException:
my.icon_path = None
else:
my.icon_path = tmp_icon_path
elif my.icon_mode: # just icon, no web
# create the icon only
thumb_size = (120,100)
try:
my._resize_image(my.file_path, tmp_icon_path, thumb_size)
except TacticException:
my.icon_path = None
else:
my.icon_path = tmp_icon_path
else:
thumb_size = my.get_web_file_size()
try:
my._resize_image(my.file_path, tmp_web_path, thumb_size)
except TacticException:
my.web_path = None
else:
my.web_path = tmp_web_path
# create the icon
thumb_size = (120,100)
try:
my._resize_image(tmp_web_path, tmp_icon_path, thumb_size)
except TacticException:
my.icon_path = None
else:
my.icon_path = tmp_icon_path
# check icon file size, reset to none if it is empty
# TODO: use finally in Python 2.5
if my.web_path:
web_path_size = os.stat(my.web_path)[stat.ST_SIZE]
if not web_path_size:
my.web_path = None
if my.icon_path:
icon_path_size = os.stat(my.icon_path)[stat.ST_SIZE]
if not icon_path_size:
my.icon_path = None
except IOError, e:
Environment.add_warning("Could not process file", \
"%s - %s" % (my.file_path, e.__str__()))
my.web_path = None
my.icon_path = None
def _extract_frame(my, large_path, small_path, thumb_size):
pass
def _resize_image(my, large_path, small_path, thumb_size):
try:
large_path = large_path.encode('utf-8')
small_path = small_path.encode('utf-8')
if HAS_IMAGE_MAGICK:
# generate imagemagick command
convert_cmd = []
convert_cmd.append(convert_exe)
# png's and psd's can have multiple layers which need to be flattened to make an accurate thumbnail
if large_path.lower().endswith('png'):
convert_cmd.append('-flatten')
if large_path.lower().endswith('psd'):
large_path += "[0]"
convert_cmd.extend(['-resize','%sx%s'%(thumb_size[0], thumb_size[1])])
# FIXME: needs PIL for this ... should use ImageMagick to find image size
if HAS_PIL:
try:
im = Image.open(large_path)
x,y = im.size
except Exception, e:
print "WARNING: ", e
x = 0
y = 0
if x < y:
# icons become awkward if height is bigger than width
# add white background for more reasonable icons
convert_cmd.extend(['-background','white'])
convert_cmd.extend(['-gravity','center'])
convert_cmd.extend(['-extent','%sx%s'%(thumb_size[0], thumb_size[1])])
convert_cmd.append('%s'%(large_path))
convert_cmd.append('%s'%(small_path))
subprocess.call(convert_cmd)
# if we don't have ImageMagick, use PIL, if installed (in non-mac os systems)
elif HAS_PIL:
# use PIL
# create the thumbnail
im = Image.open(large_path)
try:
im.seek(1)
except EOFError:
is_animated = False
else:
is_animated = True
im.seek(0)
im = im.convert('RGB')
x,y = im.size
to_ext = "PNG"
if small_path.lower().endswith('jpg') or small_path.lower().endswith('jpeg'):
to_ext = "JPEG"
if x >= y:
im.thumbnail( (thumb_size[0],10000), Image.ANTIALIAS )
im.save(small_path, to_ext)
else:
#im.thumbnail( (10000,thumb_size[1]), Image.ANTIALIAS )
x,y = im.size
# first resize to match this thumb_size
base_height = thumb_size[1]
h_percent = (base_height/float(y))
base_width = int((float(x) * float(h_percent)))
im = im.resize((base_width, base_height), Image.ANTIALIAS )
# then paste to white image
im2 = Image.new( "RGB", thumb_size, (255,255,255) )
offset = (thumb_size[0]/2) - (im.size[0]/2)
im2.paste(im, (offset,0) )
im2.save(small_path, to_ext)
# if neither IM nor PIL is installed, check if this is a mac system and use sips if so
elif sys.platform == 'darwin':
convert_cmd = ['sips', '--resampleWidth', '%s'%thumb_size[0], '--out', small_path, large_path]
subprocess.call(convert_cmd)
else:
raise TacticException('No image manipulation tool installed')
except Exception, e:
print "Error: ", e
# after these operations, confirm that the icon has been generated
if not os.path.exists(small_path):
raise TacticException('Icon generation failed')
def _resize_texture(my, large_path, small_path, scale):
# create the thumbnail
try:
im = Image.open(large_path)
x,y = im.size
resize = int( float(x) * scale )
im.thumbnail( (resize,10000), Image.ANTIALIAS )
im.save(small_path, "PNG")
except:
if sys.platform == 'darwin':
cmd = "sips --resampleWidth 25%% --out %s %s" \
                % (small_path, large_path)  # --out takes the destination (small_path) first, then the source
else:
cmd = "convert -resize 25%% %s %s" \
% (large_path, small_path)
os.system(cmd)
if not os.path.exists(small_path):
raise
def add_icons(file_paths):
new_file_paths=[]
new_file_types=[]
for file_path in file_paths:
# create icons and add to the list
creator = IconCreator(file_path)
creator.create_icons()
icon_path = creator.get_icon_path()
new_file_paths.append(icon_path)
new_file_types.append("icon")
web_path = creator.get_web_path()
new_file_paths.append(web_path)
new_file_types.append("web")
return new_file_paths, new_file_types
add_icons = staticmethod(add_icons)
class FileGroup(File):
'''Handles groups of files.
The file paths have the following syntax
<file>.####
Where the number signs indicate padding to be replaced by the file_range
The file_range parameter has the following syntax:
1-12 Means from files 1-12
'''
def check_paths(file_path, file_range):
''' check existence of files. this expects a FileRange object'''
expanded = FileGroup.expand_paths(file_path, file_range)
for expand in expanded:
if not System().exists(expand):
raise FileException("File '%s' does not exist!" % expand)
return expanded
check_paths = staticmethod(check_paths)
def create( file_path, file_range, search_type, search_id, file_type=None ):
expanded = FileGroup.check_paths(file_path, file_range)
file_name = os.path.basename(file_path)
file = File(File.SEARCH_TYPE)
file.set_value("file_name", file_name)
file.set_value("search_type", search_type)
file.set_value("search_id", search_id)
from stat import ST_SIZE
total = 0
for expanded in expanded:
size = os.stat(expanded)[ST_SIZE]
total += size
project = Project.get()
file.set_value("project_code", project.get_code())
file.set_value("st_size", total)
file.set_value("file_range", file_range.get_key())
if file_type:
file.set_value("type", file_type)
file.set_value("base_type", File.BASE_TYPE_SEQ)
file.commit()
return file
create = staticmethod(create)
def expand_paths( file_path, file_range ):
'''expands the file paths, replacing # as specified in the file_range object'''
file_paths = []
# frame_by is not really used here yet
frame_start, frame_end, frame_by = file_range.get_values()
# support %0.4d notation
if file_path.find('#') == -1:
for i in range(frame_start, frame_end+1, frame_by):
expanded = file_path % i
file_paths.append( expanded )
else:
# find out the number of #'s in the path
padding = len( file_path[file_path.index('#'):file_path.rindex('#')] )+1
for i in range(frame_start, frame_end+1, frame_by):
expanded = file_path.replace( '#'*padding, str(i).zfill(padding) )
file_paths.append(expanded)
return file_paths
expand_paths = staticmethod(expand_paths)
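    # Illustrative sketch of what expand_paths produces (hypothetical paths,
    # assuming a FileRange covering frames 1 to 3 with the default step of 1):
    #
    #   FileGroup.expand_paths("/tmp/shot.####.png", FileRange(1, 3))
    #   -> ['/tmp/shot.0001.png', '/tmp/shot.0002.png', '/tmp/shot.0003.png']
    #
    #   FileGroup.expand_paths("/tmp/shot.%0.4d.png", FileRange(1, 2))
    #   -> ['/tmp/shot.0001.png', '/tmp/shot.0002.png']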
def extract_template_and_range(cls, paths):
frame = None
# do we extract a range?
padding = 0
for i in range(12,0,-1):
p = re.compile("(\d{%d,})" % i)
path = paths[0].replace("\\", "/")
basename = os.path.basename(path)
dirname = os.path.dirname(path)
m = p.search(basename)
if m:
frame = m.groups()[0]
padding = len(frame)
break
if not frame:
padding = 4
frame = 'x'*padding
template = basename.replace(frame, '#'*padding)
frange = []
last_frame = None
p = re.compile("(\d{%s})" % padding)
for path in paths:
path = path.replace("\\", "/")
basename = os.path.basename(path)
m = p.search(basename)
if m:
frame = int(m.groups()[0])
else:
frame = 0
# the first one is always added
if last_frame == None:
frange.append(frame)
frange.append('-')
frange.append(frame)
last_frame = frame
continue
# the next ones are not
diff = frame - last_frame
if diff == 1:
frange[-1] = frame
else:
frange.append(frame)
frange.append('-')
frange.append(frame)
last_frame = frame
template = "%s/%s" % (dirname,template)
frange = "".join([str(x) for x in frange])
return template, frange
extract_template_and_range = classmethod(extract_template_and_range)
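    # Illustrative sketch of the classmethod above (hypothetical paths): a
    # contiguous numbered sequence collapses to a '#'-padded template plus a range.
    #
    #   FileGroup.extract_template_and_range(
    #       ["/tmp/shot.0001.png", "/tmp/shot.0002.png", "/tmp/shot.0003.png"])
    #   -> ("/tmp/shot.####.png", "1-3")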
class FileRange(object):
def __init__(my, frame_start=1, frame_end=1, frame_by=1):
my.frame_start = frame_start
my.frame_end = frame_end
my.frame_by = frame_by
assert(isinstance(frame_start, (int)))
assert(isinstance(frame_end, (int)))
assert(isinstance(frame_by, (int)))
def get_frame_by(my):
return my.frame_by
def get_frame_start(my):
return my.frame_start
def get_frame_end(my):
return my.frame_end
def set_frame_by(my, frame_by):
assert(isinstance(frame_by, (int)))
my.frame_by = frame_by
def set_duration(my, duration):
my.frame_start = 1
my.frame_end = duration
def get_num_frames(my):
return (my.frame_end - my.frame_start + 1) / my.frame_by
def get_key(my):
return "%s-%s/%s" % (my.frame_start, my.frame_end, my.frame_by)
def get_display(my):
if my.frame_by == 1:
return "%s-%s" % (my.frame_start, my.frame_end)
else:
return my.get_key()
def get_values(my):
return (my.frame_start, my.frame_end, my.frame_by)
# static method
def get(file_range):
''' build a FileRange obj from a string'''
frame_by = 1
if file_range.find("/") != -1:
file_range, frame_by = file_range.split("/")
tmps = file_range.split("-")
if len(tmps) > 2:
raise FileException("Unable to determine file_range [%s]" %file_range)
frame_start, frame_end = tmps[0], tmps[1]
frame_start = int(frame_start)
frame_end = int(frame_end)
frame_by = int(frame_by)
return FileRange(frame_start, frame_end, frame_by)
get = staticmethod(get)
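    # Illustrative sketch of the range string format parsed by get():
    #
    #   r = FileRange.get("1-12/2")   # frames 1 to 12, stepping by 2
    #   r.get_key()                   # -> "1-12/2"
    #   r.get_display()               # -> "1-12/2" ("1-12" when the step is 1)
    #   r.get_num_frames()            # -> 6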
| epl-1.0 | 5,453,433,257,592,640,000 | 29.903369 | 158 | 0.526464 | false | 3.817236 | false | false | false |
alirizakeles/zato | code/zato-web-admin/src/zato/admin/web/views/definition/cassandra.py | 1 | 2702 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# Zato
from zato.admin.web.forms import ChangePasswordForm
from zato.admin.web.forms.definition.cassandra import CreateForm, EditForm
from zato.admin.web.views import change_password as _change_password, CreateEdit, Delete as _Delete, Index as _Index, method_allowed
from zato.common import CASSANDRA
from zato.common.odb.model import CassandraConn
logger = logging.getLogger(__name__)
class Index(_Index):
method_allowed = 'GET'
url_name = 'def-cassandra'
template = 'zato/definition/cassandra.html'
service_name = 'zato.definition.cassandra.get-list'
output_class = CassandraConn
paginate = True
class SimpleIO(_Index.SimpleIO):
input_required = ('cluster_id',)
output_required = ('id', 'name', 'is_active', 'contact_points', 'port', 'exec_size',
'proto_version', 'default_keyspace')
output_optional = ('username', 'cql_version')
output_repeated = True
def handle(self):
return {
'default_port': CASSANDRA.DEFAULT.PORT.value,
'default_exec_size': CASSANDRA.DEFAULT.EXEC_SIZE.value,
'default_proto_version': CASSANDRA.DEFAULT.PROTOCOL_VERSION.value,
'create_form': CreateForm(),
'edit_form': EditForm(prefix='edit'),
'change_password_form': ChangePasswordForm()
}
class _CreateEdit(CreateEdit):
method_allowed = 'POST'
class SimpleIO(CreateEdit.SimpleIO):
input_required = ('cluster_id', 'name', 'is_active', 'contact_points', 'port', 'exec_size',
'proto_version', 'default_keyspace')
input_optional = ('username', 'cql_version', 'tls_ca_certs', 'tls_client_cert', 'tls_client_priv_key')
output_required = ('id', 'name')
def success_message(self, item):
return 'Successfully {0} the connection [{1}]'.format(self.verb, item.name)
class Create(_CreateEdit):
url_name = 'definition-cassandra-create'
service_name = 'zato.definition.cassandra.create'
class Edit(_CreateEdit):
url_name = 'definition-cassandra-edit'
form_prefix = 'edit-'
service_name = 'zato.definition.cassandra.edit'
class Delete(_Delete):
url_name = 'definition-cassandra-delete'
error_message = 'Could not delete the connection'
service_name = 'zato.definition.cassandra.delete'
@method_allowed('POST')
def change_password(req):
return _change_password(req, 'zato.definition.cassandra.change-password')
| gpl-3.0 | 2,134,947,678,236,529,000 | 34.552632 | 132 | 0.676906 | false | 3.527415 | false | false | false |
pmarks-net/grpc | tools/distrib/python/grpcio_tools/setup.py | 9 | 7849 | # Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from distutils import cygwinccompiler
from distutils import extension
from distutils import util
import errno
import os
import os.path
import pkg_resources
import platform
import re
import shlex
import shutil
import sys
import sysconfig
import setuptools
from setuptools.command import build_ext
# TODO(atash) add flag to disable Cython use
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath('.'))
import protoc_lib_deps
import grpc_version
PY3 = sys.version_info.major == 3
# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = '-std=c++11'
if 'win32' in sys.platform:
if sys.version_info < (3, 5):
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
elif "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
# TODO(atash) sift through the GCC specs to see if libstdc++ can have any
# influence on the linkage outcome on MinGW for non-C++ programs.
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
'-static'.format(msvcr=msvcr))
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
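# Illustrative sketch only: the hooks above are meant to be set in the build
# environment, e.g. something along the lines of
#
#   GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
#   GRPC_PYTHON_CFLAGS="-std=c++11 -O2" \
#   GRPC_PYTHON_LDFLAGS="-lpthread" \
#   pip install .
#
# Values supplied this way replace the defaults computed above rather than
# being appended to them.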
CC_FILES = [
os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
PROTO_FILES = [
os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES]
CC_INCLUDE = os.path.normpath(protoc_lib_deps.CC_INCLUDE)
PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE)
GRPC_PYTHON_TOOLS_PACKAGE = 'grpc_tools'
GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto'
DEFINE_MACROS = ()
if "win32" in sys.platform:
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
elif "linux" in sys.platform or "darwin" in sys.platform:
DEFINE_MACROS += (('HAVE_PTHREAD', 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
if 'darwin' in sys.platform and PY3:
mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
if mac_target and (pkg_resources.parse_version(mac_target) <
pkg_resources.parse_version('10.9.0')):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
r'macosx-[0-9]+\.[0-9]+-(.+)',
r'macosx-10.9-\1',
util.get_platform())
def package_data():
tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep)
proto_resources_path = os.path.join(tools_path,
GRPC_PYTHON_PROTO_RESOURCES_NAME)
proto_files = []
for proto_file in PROTO_FILES:
source = os.path.join(PROTO_INCLUDE, proto_file)
target = os.path.join(proto_resources_path, proto_file)
relative_target = os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME, proto_file)
try:
os.makedirs(os.path.dirname(target))
except OSError as error:
if error.errno == errno.EEXIST:
pass
else:
raise
shutil.copy(source, target)
proto_files.append(relative_target)
return {GRPC_PYTHON_TOOLS_PACKAGE: proto_files}
def extension_modules():
if BUILD_WITH_CYTHON:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.pyx')]
else:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.cpp')]
plugin_sources += [
os.path.join('grpc_tools', 'main.cc'),
os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')] + [
os.path.join(CC_INCLUDE, cc_file)
for cc_file in CC_FILES]
plugin_ext = extension.Extension(
name='grpc_tools._protoc_compiler',
sources=plugin_sources,
include_dirs=[
'.',
'grpc_root',
os.path.join('grpc_root', 'include'),
CC_INCLUDE,
],
language='c++',
define_macros=list(DEFINE_MACROS),
extra_compile_args=list(EXTRA_COMPILE_ARGS),
extra_link_args=list(EXTRA_LINK_ARGS),
)
extensions = [plugin_ext]
if BUILD_WITH_CYTHON:
from Cython import Build
return Build.cythonize(extensions)
else:
return extensions
setuptools.setup(
name='grpcio-tools',
version=grpc_version.VERSION,
license='3-clause BSD',
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.0.0',
'grpcio>={version}'.format(version=grpc_version.VERSION),
],
package_data=package_data(),
)
| bsd-3-clause | -6,471,850,476,679,639,000 | 38.84264 | 95 | 0.705822 | false | 3.456187 | false | false | false |
gotling/PyTach | pytach/web/web.py | 1 | 1769 | # -*- coding: utf-8 -*-
import os
import re
import bottle
import string
import inspect
from bottle import static_file, template, url, request
import dispatch
static_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/static'
bottle.TEMPLATE_PATH.insert(0, os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) +'/views')
app = application = bottle.Bottle()
bottle.default_app.push(app)
bottle.BaseTemplate.defaults['url'] = url
@app.route('/', name='main')
def main():
return template('main', devices=dispatch.devices, activities=dispatch.activities, request=request)
@app.route('/static/<filename:path>', name='static')
def static(filename):
return static_file(filename, root=static_path)
@app.route('/activity/<activity>', name='activity_view', method='GET')
def activity_view(activity):
return template('activity', activity=dispatch.activities[activity], devices=dispatch.devices, activities=dispatch.activities, request=request)
@app.route('/activity/<activity:path>', name='activity', method='POST')
def activity(activity):
activity, command = activity.split('/')
try:
dispatch.activity(activity, command)
except NameError, e:
print "Input error:", e
@app.route('/device/<device>', name='device_view', method='GET')
def device_view(device):
return template('device', device=dispatch.devices[device], devices=dispatch.devices, activities=dispatch.activities, request=request)
@app.route('/device/<device:path>', name='device', method='POST')
def device(device):
device, command = device.split('/')
try:
result = dispatch.device(device, command)
return result if result else ""
except NameError, e:
print "Input error:", e | mit | -5,929,668,997,344,574,000 | 33.705882 | 146 | 0.715093 | false | 3.771855 | false | false | false |
TamiaLab/carnetdumaker | apps/announcements/tests/test_views.py | 1 | 8883 | """
Tests suite for the views of the announcements app.
"""
from datetime import timedelta
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.utils import timezone
from ..models import (Announcement,
AnnouncementTag)
class AnnouncementViewsTestCase(TestCase):
"""
Tests case for the views.
"""
def setUp(self):
"""
Create some fixtures for the tests.
"""
now = timezone.now()
past_now = now - timedelta(seconds=1)
future_now = now + timedelta(seconds=100)
author = get_user_model().objects.create_user(username='johndoe',
password='illpassword',
email='john.doe@example.com')
self.announcement_unpublished = Announcement.objects.create(title='Test 1',
slug='test-1',
author=author,
content='Hello World!')
self.announcement_published = Announcement.objects.create(title='Test 2',
slug='test-2',
author=author,
content='Hello World!',
pub_date=past_now)
self.announcement_published = Announcement.objects.create(title='Test 3',
slug='test-3',
author=author,
content='Hello World!',
pub_date=now)
self.announcement_published_in_future = Announcement.objects.create(title='Test 4',
slug='test-4',
author=author,
content='Hello World!',
pub_date=future_now)
self.tag = AnnouncementTag.objects.create(name='Test tag', slug='test-tag')
self.tag2 = AnnouncementTag.objects.create(name='Test tag 2', slug='test-tag-2')
self.announcement_unpublished.tags.add(self.tag)
self.announcement_published.tags.add(self.tag)
self.announcement_published_in_future.tags.add(self.tag)
def test_announcement_list_view_available(self):
"""
Test the availability of the "announcement list" view.
"""
client = Client()
response = client.get(reverse('announcements:index'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/announcement_list.html')
self.assertIn('announcements', response.context)
self.assertQuerysetEqual(response.context['announcements'], ['<Announcement: Test 3>',
'<Announcement: Test 2>'])
def test_announcement_detail_view_available_with_published_announcement(self):
"""
Test the availability of the "announcement detail" view for a published announcement.
"""
client = Client()
response = client.get(self.announcement_published.get_absolute_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/announcement_detail.html')
self.assertIn('announcement', response.context)
self.assertEqual(response.context['announcement'], self.announcement_published)
def test_announcement_detail_view_unavailable_with_unpublished_announcement(self):
"""
Test the unavailability of the "announcement detail" view for an unpublished announcement.
"""
client = Client()
response = client.get(self.announcement_unpublished.get_absolute_url())
self.assertEqual(response.status_code, 404)
def test_announcement_preview_available_with_unpublished_announcement_if_authorized(self):
"""
Test the availability of the "announcement preview" view for an unpublished announcement if the
current user is authorized to see the preview.
"""
client = Client()
client.login(username='johndoe', password='illpassword')
response = client.get(self.announcement_unpublished.get_absolute_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/announcement_detail.html')
self.assertIn('announcement', response.context)
self.assertEqual(response.context['announcement'], self.announcement_unpublished)
def test_announcement_detail_view_unavailable_with_published_in_future_announcement(self):
"""
        Test the unavailability of the "announcement detail" view for an announcement published in the future.
"""
client = Client()
response = client.get(self.announcement_published_in_future.get_absolute_url())
self.assertEqual(response.status_code, 404)
def test_announcement_preview_available_with_published_in_future_announcement_if_authorized(self):
"""
Test the availability of the "announcement preview" view for a published in future announcement if the
current user is authorized to see the preview.
"""
client = Client()
client.login(username='johndoe', password='illpassword')
response = client.get(self.announcement_published_in_future.get_absolute_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/announcement_detail.html')
self.assertIn('announcement', response.context)
self.assertEqual(response.context['announcement'], self.announcement_published_in_future)
def test_latest_announcements_rss_feed_available(self):
"""
Test the availability of the "latest announcements" rss feed view.
"""
client = Client()
response = client.get(reverse('announcements:latest_announcements_rss'))
self.assertEqual(response.status_code, 200)
def test_latest_announcements_atom_feed_available(self):
"""
Test the availability of the "latest announcements" atom feed" view.
"""
client = Client()
response = client.get(reverse('announcements:latest_announcements_atom'))
self.assertEqual(response.status_code, 200)
def test_announcement_tag_list_view_available(self):
"""
Test the availability of the "announcement tag list" view.
"""
client = Client()
response = client.get(reverse('announcements:tag_list'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/tag_list.html')
self.assertIn('tags', response.context)
self.assertEqual(str(response.context['tags']), str([self.tag, self.tag2]))
def test_announcement_tag_detail_view_available(self):
"""
Test the availability of the "announcement tag detail" view.
"""
client = Client()
response = client.get(reverse('announcements:tag_detail', kwargs={'slug': self.tag.slug}))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'announcements/tag_detail.html')
self.assertIn('tag', response.context)
self.assertEqual(response.context['tag'], self.tag)
self.assertIn('related_announcements', response.context)
self.assertQuerysetEqual(response.context['related_announcements'], ['<Announcement: Test 3>'])
def test_latest_tag_announcements_rss_feed_available(self):
"""
Test the availability of the "latest announcements for tag" rss feed view.
"""
client = Client()
response = client.get(reverse('announcements:latest_tag_announcements_rss', kwargs={'slug': self.tag.slug}))
self.assertEqual(response.status_code, 200)
def test_latest_tag_announcements_atom_feed_available(self):
"""
Test the availability of the "latest announcements for tag" atom feed" view.
"""
client = Client()
response = client.get(reverse('announcements:latest_tag_announcements_atom', kwargs={'slug': self.tag.slug}))
self.assertEqual(response.status_code, 200)
| agpl-3.0 | -3,132,277,509,876,051,500 | 49.76 | 117 | 0.589778 | false | 4.770677 | true | false | false |
StuJ/collator | config/settings/local.py | 1 | 2265 | # -*- coding: utf-8 -*-
"""
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
import socket
import os
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='edf!yibz*vbd#%jrx^h!4xj*!axs107^sr9-8q%_8po@hwixa@')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', '0.0.0.0', 'localhost' ]
# tricks to have debug toolbar when developing with docker
if os.environ.get('USE_DOCKER') == 'yes':
ip = socket.gethostbyname(socket.gethostname())
INTERNAL_IPS += [ip[:-1] + "1"]
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
| mit | -5,780,646,828,236,686,000 | 29.608108 | 99 | 0.485651 | false | 4.148352 | false | false | false |
deepak02/rasa_core | rasa_core/policies/policy.py | 1 | 2298 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import numpy as np
from builtins import object
from numpy.core.records import ndarray
from typing import Any
from typing import List
from typing import Optional
from typing import Text
from rasa_core.domain import Domain
from rasa_core.featurizers import Featurizer
from rasa_core.trackers import DialogueStateTracker
logger = logging.getLogger(__name__)
class Policy(object):
SUPPORTS_ONLINE_TRAINING = False
MAX_HISTORY_DEFAULT = 3
def __init__(self, featurizer=None, max_history=None):
        # type: (Optional[Featurizer], Optional[int]) -> None
self.featurizer = featurizer
self.max_history = max_history
def featurize(self, tracker, domain):
# type: (DialogueStateTracker, Domain) -> ndarray
"""Transform tracker into a vector representation.
The tracker, consisting of multiple turns, will be transformed
        into a float vector which can be used by an ML model."""
x = domain.feature_vector_for_tracker(self.featurizer, tracker,
self.max_history)
return np.array(x)
def predict_action_probabilities(self, tracker, domain):
# type: (DialogueStateTracker, Domain) -> List[float]
return []
def prepare(self, featurizer, max_history):
self.featurizer = featurizer
self.max_history = max_history
def train(self, X, y, domain, **kwargs):
# type: (ndarray, List[int], Domain, **Any) -> None
"""Trains the policy on given training data."""
raise NotImplementedError
def continue_training(self, X, y, domain, **kwargs):
"""Continues training an already trained policy.
This doesn't need to be supported by every policy. If it is supported,
the policy can be used for online training and the implementation for
the continued training should be put into this function."""
pass
def persist(self, path):
# type: (Text) -> None
"""Persists the policy to storage."""
pass
@classmethod
def load(cls, path, featurizer, max_history):
raise NotImplementedError
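    # A minimal sketch of a concrete subclass (hypothetical, not part of
    # rasa_core itself; assumes the domain object exposes `num_actions`):
    #
    #   class UniformPolicy(Policy):
    #       def train(self, X, y, domain, **kwargs):
    #           pass  # nothing to learn
    #
    #       def predict_action_probabilities(self, tracker, domain):
    #           return [1.0 / domain.num_actions] * domain.num_actions
    #
    #       def persist(self, path):
    #           pass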
| apache-2.0 | -5,139,038,974,284,616,000 | 29.64 | 78 | 0.667102 | false | 4.263451 | false | false | false |
pagea/unstyle | unstyle/gui/unstyle_frontend.py | 1 | 9496 | from PyQt5.QtCore import pyqtSlot, pyqtSignal
from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog
from PyQt5.QtWidgets import QTableWidgetItem, QHeaderView
from PyQt5 import QtGui
from unstyle.gui.unstyle_auto import Ui_Unstyle
import unstyle.controller
class Unstyle(QMainWindow):
def __init__(self, parent=None):
# Initialized the generated interface code.
super(Unstyle, self).__init__(parent)
self.ui = Ui_Unstyle()
self.ui.setupUi(self)
self.featureRows = {}
self.setWindowTitle("Unstyle")
# Signal connections
self.ui.stackedNext.clicked.connect(self.stackNext_clicked)
self.ui.browseYourDoc.clicked.connect(self.browseYourDoc_clicked)
self.ui.browseYourDocs.clicked.connect(self.browseYourDocs_clicked)
self.ui.deleteYourDocs.clicked.connect(self.deleteYourDocs_clicked)
self.ui.textEdit.textChanged.connect(self.refreshAnonymity)
self.ui.rankTable.selectionModel().selectionChanged.connect(
self.row_highlighted)
self.ui.saveDoc.clicked.connect(self.saveDoc_clicked)
def getFeatureDesc(self, functionName):
"""Translate feature extractor names into something that the end user
can understand.
:param functionName: A feature extracting function.
        :returns: A tuple containing ("Feature Name", "Description of feature").
"""
names = {
"letterSpace": (
"Letter Space",
("The total number of letters appearing in your "
"document.")),
"gunningFog": (
"Gunning-Fog readability",
("A function related to "
"the ratio of words/sentences and complex word/total words.")),
"avgSyllablesPerWord": (
"Average syllables per word",
("The total "
"number of syllables/the total number of words.")),
"unique_words": (
"Unique words",
("The number of words that appear "
"only once in your document.")),
"sentenceCount": (
"Sentence count",
("The number of sentences in your document.")),
"characterSpace": (
"Character space",
("The total number of "
"characters (letters and numbers) appearing in your document.")),
"avgSentenceLength": (
"Average sentence length",
("The average "
"length of sentences in your document.")),
"complexity": (
"Complexity",
("The ratio of unique words to total"
"words in your document.")),
"fleschReadingEase": (
"Flesch readability",
("A function related to"
" the ratio of words/sentences and syllables/words."))}
return names[functionName]
# stackedWidget buttons
def stackNext_clicked(self):
# Go to the next screen.
self.ui.stackedWidget.setCurrentIndex(1)
# Tell the controller to train its classifier.
unstyle.controller.readyToClassify()
def browseYourDoc_clicked(self):
filename = QFileDialog.getOpenFileName()
unstyle.controller.document_to_anonymize_path = filename[0]
self.ui.yourdoc.setText(filename[0])
unstyle.controller.document_to_anonymize = unstyle.controller.load_document(
filename[0])
# Show the text of the document in the text editor and enable it.
self.ui.textEdit.setText(unstyle.controller.document_to_anonymize)
self.ui.textEdit.setEnabled(True)
def browseYourDocs_clicked(self):
filenames = QFileDialog.getOpenFileNames()
        if filenames[0]:
for path in filenames[0]:
unstyle.controller.other_user_documents_paths.append(path)
self.ui.otherdocslist.addItem(path)
def deleteYourDocs_clicked(self):
selected = self.ui.otherdocslist.currentItem()
# Make sure the user selected a document before trying to delete
# anything
if selected is not None:
row = self.ui.otherdocslist.currentRow()
unstyle.controller.other_user_documents_paths.remove(
selected.text())
self.ui.otherdocslist.takeItem(row)
else:
pass
def saveDoc_clicked(self):
"""Save the current state of the text editor to a file defined by the
user.
"""
# Open a save dialog
        filename = QFileDialog.getSaveFileName()[0]
        if filename:
            with open(filename, 'w+') as file:
                file.write(str(self.ui.textEdit.toPlainText()))
# TODO: Rather than check anonymity every time the user changes the text,
# have a separate thread check every 5 or 10 seconds. Otherwise, we're going
# to be constantly locking up the interface when we use large featuresets.
def refreshAnonymity(self):
"""Called whenever the user changes the text editor.
"""
# Make sure we've trained the classifier before trying to do any
# predictions.
if unstyle.controller.trained_classifier is None:
return 0
anonymity = unstyle.controller.checkAnonymity(
self.ui.textEdit.toPlainText())
        if anonymity == 0:
self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/x.png"))
self.ui.anonStatus.setText(
("It is still possible to identify you as the "
"author. Continue changing your document."))
        if anonymity == 1:
self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/w.png"))
self.ui.anonStatus.setText(
("Although you are not the most likely author,"
" there is a statistically significant chance"
" that you wrote the document. Continue"
" changing your document."))
        if anonymity == 2:
self.ui.anonIcon.setPixmap(QtGui.QPixmap(":/icons/img/check.png"))
self.ui.anonStatus.setText(
("Congratulations! It appears that your"
" document is no longer associated with your"
" identity."))
def row_highlighted(self, _, __):
"""Every time someone selects a row from the table, we update our
description box with the description of the feature.
"""
selected = self.ui.rankTable.selectionModel().selectedRows()[0].row()
featureHighlighted = self.featureRows[selected]
# Display the description of the highlighted feature
self.ui.featureDescription.setText(
self.getFeatureDesc(featureHighlighted)[1])
# Controller messages
def update_stats(self):
self.refreshAnonymity()
# Set up rank table dimensions
self.ui.rankTable.setRowCount(len(unstyle.controller.feature_ranks))
# Name the headers of the table
headers = "Text Features", "Target", "Initial"
self.ui.rankTable.setHorizontalHeaderLabels(headers)
headerObj = self.ui.rankTable.horizontalHeader()
headerObj.setSectionResizeMode(0, QHeaderView.ResizeToContents)
tableHeight = (len(unstyle.controller.feature_ranks))
# XXX: Sorting should be handled in the table, not in the
# rank_features methods. This will allow us to fix this embarrassingly
# overcomplicated code.
# Fill in the feature column
for idx, pair in enumerate(unstyle.controller.feature_ranks):
currItem = self.ui.rankTable.item(idx, 0)
# If we are setting up the table for the first time, currItem will
# not exist.
if currItem is None:
currItem = QTableWidgetItem(1)
currItem.setText(self.getFeatureDesc(pair[0])[0])
self.ui.rankTable.setItem(idx, 0, currItem)
else:
                currItem.setText(self.getFeatureDesc(pair[0])[0])
# Initialize target and initial columns
for idx, target in enumerate(unstyle.controller.targets):
currItem = self.ui.rankTable.item(idx, 1)
if currItem is None:
currItem = QTableWidgetItem(1)
currItem.setText(str(target))
self.ui.rankTable.setItem(idx, 1, currItem)
currItem2 = QTableWidgetItem(1)
self.ui.rankTable.setItem(idx, 2, currItem2)
# Populate target and current val columns
# Track feature table locations
labelsBeforeSorting = unstyle.controller.featlabels
for idx, label in enumerate(labelsBeforeSorting):
for idx2, item in enumerate(range(tableHeight)):
currItem = self.ui.rankTable.item(item, 0)
if self.getFeatureDesc(label)[0] == currItem.text():
self.featureRows[idx2] = label
print(label, " ", currItem.text(), " ", item)
currItem = self.ui.rankTable.item(item, 1)
currItem.setText(str(unstyle.controller.targets[idx]))
currItem = self.ui.rankTable.item(item, 2)
currItem.setText(
str(unstyle.controller.to_anonymize_features[0][idx]))
| mit | 2,554,661,328,254,949,000 | 43.792453 | 84 | 0.607098 | false | 4.369995 | false | false | false |
songjun54cm/MachineLearningPy | NeuralNetwork/Layers/layer_activation.py | 1 | 1191 | import numpy as np
from layer_example import Layer
class Activation(Layer):
def __init__(self, type):
        if type == 'sigmoid':
self.fun = self.sigmoid
self.fun_d = self.sigmoid_d
elif type == 'relu':
self.fun = self.relu
self.fun_d = self.relu_d
elif type == 'tanh':
self.fun = self.tanh
self.fun_d = self.tanh_d
else:
raise ValueError('Invalid activation function.')
def sigmoid(self, x):
return 1.0/(1.0+np.exp(-x))
def sigmoid_d(self, x):
s = self.sigmoid(x)
return s*(1.0-s)
def tanh(self, x):
return np.tanh(x)
def tanh_d(self, x):
        # derivative of tanh(x) is 1 - tanh(x)**2
        t = np.tanh(x)
        return 1.0 - t**2
def relu(self, x):
return np.maximum(0.0, x)
def relu_d(self, x):
dx = np.zeros(x.shape)
dx[x >= 0] = 1
return dx
def fprop(self, input_data):
self.last_input_data = input_data
return self.fun(input_data)
def bprop(self, output_grad):
return output_grad * self.fun_d(self.last_input_data)
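    # Illustrative forward/backward sketch (hypothetical input):
    #
    #   layer = Activation('relu')
    #   out = layer.fprop(np.array([[-1.0, 2.0]]))     # -> [[0., 2.]]
    #   grad = layer.bprop(np.array([[1.0, 1.0]]))     # -> [[0., 1.]]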
def get_output_shape(self, input_shape):
return input_shape | gpl-2.0 | -2,508,791,994,177,604,000 | 24.361702 | 61 | 0.533165 | false | 3.308333 | false | false | false |
goirijo/thermoplotting | thermoplotting/xtals/lattice.py | 1 | 12145 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from builtins import zip
from builtins import range
from builtins import object
from ..ternary import normal
from ..misc import *
import itertools
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
from mpl_toolkits.mplot3d.art3d import Line3DCollection
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import proj3d
import numpy as np
from scipy.spatial import Voronoi, ConvexHull
import warnings
def to_cartesian(vecmat,points):
"""Given lattice vectors and points in fractional coordinates,
convert the points to Cartesian
:vecmat: np array (lattice vectors as columns)
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return np.dot(vecmat,points.T).T
def to_fractional(vecmat,points):
"""Given lattice vectors and points in Cartesian coordinates,
convert the points to fractional
:vecmat: np array (lattice vectors as columns)
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return np.dot(np.linalg.inv(vecmat),points.T).T
def simplex_bin(hull):
"""Given a convex hull, check the equations of the
hyperplanes and bin them, returning a set of simplex
groups with a common equation (i.e. coplanar simplices)
:hull: convex hull
:returns: array of array of simplex
"""
equations=np.vstack({tuple(q) for q in hull.equations})
binned=[[] for q in equations]
for q,s in zip(hull.equations,hull.simplices):
#whichever row is zero has the same equation as the current simplex
single_zero_row=equations-q
index=np.where((single_zero_row==0).all(axis=1))[0]
assert(index.shape==(1,))
index=index[0]
binned[index].append(s)
return [np.unique(a) for a in binned]
def signed_angle_3d(v0,v1,vn):
"""Get the signed angle for two vectors in 3d space.
:v0: np vector
:v1: np vector
:vn: np vector (normal vector)
:returns: float (rad)
"""
v0n=v0/np.linalg.norm(v0)
v1n=v1/np.linalg.norm(v1)
#Avoid float point pain with 8 decimal places. Close enough
angle=np.arccos(round(np.dot(v0n,v1n),8))
cross=np.cross(v0,v1)
if np.dot(vn, cross) < 0:
angle=-angle
return angle
def polygonal_sort(points):
"""Given a set of points that define a polygon,
sort them so
that they all go around the center in order.
:points: np array
:returns: np array
"""
n=normal(points[0:3])
c=np.sum(points,axis=0)/len(points)
ref=points[0]-c
angles=np.array([signed_angle_3d(c-p,ref,n) for p in points])
sortinds=np.argsort(angles)
return points[sortinds]
def polygon_facet_center(points):
"""Given a set of points that define a polygon,
find the center of the polygon
:points: np array
:returns: np array
"""
center=np.average(points,axis=0)
return center
def polygon_edge_centers(points):
"""Given a set of points that define a polygon,
find the centers of the edges.
:points: np array
:returns: np array
"""
rolled=np.roll(points,1,axis=0)
centers=(rolled+points)/2
return centers
def reciprocal_lattice(latmat):
"""Cross vectors and multiply by 2 pi to get the
reciprocal of the given lattice
:latmat: np 3x3 (vectors in columns)
:returns: np 3x3
"""
a,b,c=latmat.T
vol=np.dot(a,np.cross(b,c))
astar=2*np.pi*np.cross(b,c)/vol
bstar=2*np.pi*np.cross(c,a)/vol
cstar=2*np.pi*np.cross(a,b)/vol
return np.array([astar,bstar,cstar]).T
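# Sanity-check sketch of the convention above, assuming a simple cubic lattice:
# for latmat = a0 * np.identity(3), reciprocal_lattice(latmat) is
# (2 * np.pi / a0) * np.identity(3), so np.dot(latmat.T, reciprocal_lattice(latmat))
# equals 2 * np.pi * np.identity(3).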
def wigner_seitz_points(latmat):
"""Determine the edges of the Wigner Seitz cell, given the lattice.
Generates just enough lattice points to generate a single WS cell,
then selects points from the only full region.
If the reciprocal lattice is given, then the points define the first Brillouin
zone.
:latmat: 3x3 vectors as columns
:returns: np list of points (as rows)
"""
a,b,c=latmat.T
#Range of lattice points that will be enough to enclose the Weigner Seitz cell
radpoints=list(range(-1,2))
counterpoints=[(x,y,z) for x in radpoints for y in radpoints for z in radpoints]
gridpoints=np.array([x*a+y*b+z*c for x,y,z in counterpoints])
#Construct Voronoi cell
vor=Voronoi(gridpoints,furthest_site=False)
vorpoints=vor.vertices
vorregions=vor.regions
#Only one full Voronoi cell should have been constructed
    goodregions=[x for x in vorregions if len(x) > 0 and -1 not in x]
if len(goodregions)!=1:
warnings.warn("Could not isolate a single Voronoi cell! Results may be wonky.")
return vorpoints[goodregions[-1]]
def wigner_seitz_facets(latmat):
"""Returns a list of polygons corresponding to the Weigner Seitz cell
:returns: Poly3DCollection
"""
vorpoints=wigner_seitz_points(latmat)
ch=ConvexHull(vorpoints)
binned=simplex_bin(ch)
polygons=[polygonal_sort(ch.points[b]) for b in binned]
return polygons
def draw_voronoi_cell(vectormat,ax,alpha):
"""Plot the Voronoi cell using the given lattice
:vectormat: Either the real or reciprocal lattice
:ax: matplotlib subplot
:returns: ax
"""
norms=np.linalg.norm(vectormat,axis=0)
maxrange=np.amax(norms)
polygons=wigner_seitz_facets(vectormat)
ax.add_collection(Poly3DCollection(polygons,facecolors='w',linewidth=2,alpha=alpha,zorder=0))
ax.add_collection(Line3DCollection(polygons,colors='k',linewidth=0.8, linestyles=':'))
ax.set_xlim([-maxrange,maxrange])
ax.set_ylim([-maxrange,maxrange])
ax.set_zlim([-maxrange,maxrange])
return ax
def voronoi_facet_centers(vectormat, fractional=True):
"""Calculate the centers of facets of either the brillouin zone,
or Wigner Seitz cell, depending on the given vectormat
:vectormat: Either the real or reciprocal lattice
:fractional: bool
:returns: np array
"""
polygons=wigner_seitz_facets(vectormat)
centers=np.stack([polygon_facet_center(p) for p in polygons])
if fractional:
centers=to_fractional(vectormat,centers)
return centers
def voronoi_edge_centers(vectormat, fractional=True):
"""Calculate the centers of the edges of either the brillouin zone,
or Wigner Seitz cell, depending on the given vectormat
:vectormat: Either the real or reciprocal lattice
:fractional: bool
:returns: np array
"""
polygons=wigner_seitz_facets(vectormat)
for p in polygons:
print(polygon_edge_centers(p))
centers=np.concatenate([polygon_edge_centers(p) for p in polygons],axis=0)
if fractional:
centers=to_fractional(vectormat,centers)
return np.vstack({tuple(row) for row in centers})
def voronoi_vertexes(vectormat, fractional=True):
"""Get the coordinates of the corners/vertexes of the brillouin zone
:vectormat: Either the real or reciprocal lattice
:fractional: bool
:returns: np array
"""
polygons=wigner_seitz_facets(vectormat)
points=np.concatenate(polygons,axis=0)
return np.vstack({tuple(row) for row in points})
class Lattice(object):
"""Simple class to hold the lattice vectors of a lattice, with
a few routines to do things in reciprocal space"""
def __init__(self, a, b, c):
"""Define the lattice with three lattice vectors, stored
vertically in a matrix
:a: 3x1
:b: 3x1
:c: 3x1
"""
self._latmat=np.array([a,b,c]).T
self._recipmat=reciprocal_lattice(self._latmat)
def real_to_cartesian(self, points):
"""Convert a list of fractional coordinates into Cartesian
for the real lattice
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return to_cartesian(self._latmat,points)
def real_to_fractional(self, points):
"""Convert a list of Cartesian coordinates into fractional
for the real lattice
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return to_fractional(self._latmat,points)
def reciprocal_to_cartesian(self, points):
"""Convert a list of fractional coordinates into Cartesian
for the reciprocal lattice
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return to_cartesian(self._recipmat,points)
def reciprocal_to_fractional(self, points):
"""Convert a list of Cartesian coordinates into fractional
for the reciprocal lattice
:points: np array (vertically stacked coordinates)
:returns: np array
"""
return to_fractional(self._recipmat,points)
def draw_wigner_seitz_cell(self,ax,alpha=1):
"""Plot the Wigner Seitz cell of the lattice
(Voronoi of real lattice)
:ax: matplotlib subplot
:returns: ax
"""
return self._draw_voronoi_cell(self._latmat,ax,alpha)
def draw_brillouin_zone(self,ax,alpha=1):
"""Plot the first Brillouin zone in reciprocal space
(Voronoi of reciprocal lattice)
:ax: matplotlib subplot
:returns: ax
"""
return draw_voronoi_cell(self._recipmat,ax,alpha)
def brillouin_facet_centers(self,fractional=True):
"""Calculate the center of all facets of the brillouin zone
:returns: np array
"""
return voronoi_facet_centers(self._recipmat,fractional)
def brillouin_edge_centers(self,fractional=True):
"""Calculate the center of all facets of the brillouin zone
:returns: np array
"""
return voronoi_edge_centers(self._recipmat,fractional)
def brillouin_vertexes(self,fractional=True):
"""Get the coordinates of the vertexes of the brillouin zone
:returns: np array
"""
return voronoi_vertexes(self._recipmat,fractional)
def draw_real_vectors(self, ax):
"""Draw the real lattice vectors
:ax: matplotlib subplot
:returns: ax
"""
for v,color in zip(self._latmat.T,['r','g','b']):
arr=Arrow3D([0,v[0]],[0,v[1]],[0,v[2]],lw=3,arrowstyle="-|>",mutation_scale=20,color=color,linestyle="-")
ax.add_artist(arr)
return ax
def draw_reciprocal_vectors(self, ax):
"""Draw the reciprocal lattice vectors
:ax: matplotlib subplot
:returns: ax
"""
for v,color in zip(self._recipmat.T,['r','g','b']):
arr=Arrow3D([0,v[0]],[0,v[1]],[0,v[2]],lw=3,arrowstyle="-|>",mutation_scale=20,color=color,linestyle="--")
ax.add_artist(arr)
return ax
def angles(self, rad=True, reciprocal=False):
"""Return the value of alpha, beta and gamma, i.e. the angles
between the lattice vectors.
:returns: (float,float,float)
"""
if not reciprocal:
a,b,c=self._latmat.T
else:
a,b,c=self._recipmat.T
alpha=angle_between(b,c)
beta=angle_between(c,a)
gamma=angle_between(a,b)
if not rad:
alpha=alpha*180/np.pi
beta=beta*180/np.pi
gamma=gamma*180/np.pi
return alpha,beta,gamma
def lengths(self,reciprocal=False):
"""Return the length of each lattice vector
        :returns: (float, float, float) the lengths of the a, b and c vectors
"""
if not reciprocal:
a,b,c=self._latmat.T
else:
a,b,c=self._recipmat.T
al=np.linalg.norm(a)
bl=np.linalg.norm(b)
cl=np.linalg.norm(c)
return al,bl,cl
def column_lattice(self):
"""Return the lattice as column vectors in a matrix
Returns
-------
np.array 3x3
"""
return self._latmat
def row_lattice(self):
"""Return the lattice as row vectors in a matrix
Returns
-------
np.array 3x3
"""
return self._latmat.T
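# Illustrative usage sketch (hypothetical simple cubic cell of edge 2.0):
#
#   lat = Lattice(np.array([2., 0., 0.]),
#                 np.array([0., 2., 0.]),
#                 np.array([0., 0., 2.]))
#   lat.lengths()                           # -> (2.0, 2.0, 2.0)
#   lat.angles(rad=False)                   # -> (90.0, 90.0, 90.0)
#   centers = lat.brillouin_facet_centers() # fractional coordinates by default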
if __name__ == "__main__":
main()
| mit | -3,046,888,105,209,656,000 | 26.048998 | 118 | 0.646521 | false | 3.47397 | false | false | false |
TribeMedia/synapse | synapse/metrics/metric.py | 2 | 6074 | # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
# TODO(paul): I can't believe Python doesn't have one of these
def map_concat(func, items):
# flatten a list-of-lists
return list(chain.from_iterable(map(func, items)))
class BaseMetric(object):
def __init__(self, name, labels=[]):
self.name = name
self.labels = labels # OK not to clone as we never write it
def dimension(self):
return len(self.labels)
def is_scalar(self):
return not len(self.labels)
def _render_labelvalue(self, value):
# TODO: some kind of value escape
return '"%s"' % (value)
def _render_key(self, values):
if self.is_scalar():
return ""
return "{%s}" % (
",".join(["%s=%s" % (k, self._render_labelvalue(v))
for k, v in zip(self.labels, values)])
)
class CounterMetric(BaseMetric):
"""The simplest kind of metric; one that stores a monotonically-increasing
integer that counts events."""
def __init__(self, *args, **kwargs):
super(CounterMetric, self).__init__(*args, **kwargs)
self.counts = {}
# Scalar metrics are never empty
if self.is_scalar():
self.counts[()] = 0
def inc_by(self, incr, *values):
if len(values) != self.dimension():
raise ValueError(
"Expected as many values to inc() as labels (%d)" % (self.dimension())
)
# TODO: should assert that the tag values are all strings
if values not in self.counts:
self.counts[values] = incr
else:
self.counts[values] += incr
def inc(self, *values):
self.inc_by(1, *values)
def render_item(self, k):
return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])]
def render(self):
return map_concat(self.render_item, sorted(self.counts.keys()))
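# Illustrative sketch of the exposition format (hypothetical metric name/labels):
#
#   counter = CounterMetric("requests", labels=["method"])
#   counter.inc("GET")
#   counter.inc_by(2, "POST")
#   counter.render()
#   # -> ['requests{method="GET"} 1', 'requests{method="POST"} 2']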
class CallbackMetric(BaseMetric):
"""A metric that returns the numeric value returned by a callback whenever
it is rendered. Typically this is used to implement gauges that yield the
size or other state of some in-memory object by actively querying it."""
def __init__(self, name, callback, labels=[]):
super(CallbackMetric, self).__init__(name, labels=labels)
self.callback = callback
def render(self):
value = self.callback()
if self.is_scalar():
return ["%s %.12g" % (self.name, value)]
return ["%s%s %.12g" % (self.name, self._render_key(k), value[k])
for k in sorted(value.keys())]
class DistributionMetric(object):
"""A combination of an event counter and an accumulator, which counts
both the number of events and accumulates the total value. Typically this
could be used to keep track of method-running times, or other distributions
    of values that occur in discrete occurrences.
TODO(paul): Try to export some heatmap-style stats?
"""
def __init__(self, name, *args, **kwargs):
self.counts = CounterMetric(name + ":count", **kwargs)
self.totals = CounterMetric(name + ":total", **kwargs)
def inc_by(self, inc, *values):
self.counts.inc(*values)
self.totals.inc_by(inc, *values)
def render(self):
return self.counts.render() + self.totals.render()
class CacheMetric(object):
__slots__ = ("name", "cache_name", "hits", "misses", "size_callback")
def __init__(self, name, size_callback, cache_name):
self.name = name
self.cache_name = cache_name
self.hits = 0
self.misses = 0
self.size_callback = size_callback
def inc_hits(self):
self.hits += 1
def inc_misses(self):
self.misses += 1
def render(self):
size = self.size_callback()
hits = self.hits
total = self.misses + self.hits
return [
"""%s:hits{name="%s"} %d""" % (self.name, self.cache_name, hits),
"""%s:total{name="%s"} %d""" % (self.name, self.cache_name, total),
"""%s:size{name="%s"} %d""" % (self.name, self.cache_name, size),
]
class MemoryUsageMetric(object):
"""Keeps track of the current memory usage, using psutil.
The class will keep the current min/max/sum/counts of rss over the last
WINDOW_SIZE_SEC, by polling UPDATE_HZ times per second
"""
UPDATE_HZ = 2 # number of times to get memory per second
WINDOW_SIZE_SEC = 30 # the size of the window in seconds
def __init__(self, hs, psutil):
clock = hs.get_clock()
self.memory_snapshots = []
self.process = psutil.Process()
clock.looping_call(self._update_curr_values, 1000 / self.UPDATE_HZ)
def _update_curr_values(self):
max_size = self.UPDATE_HZ * self.WINDOW_SIZE_SEC
self.memory_snapshots.append(self.process.memory_info().rss)
self.memory_snapshots[:] = self.memory_snapshots[-max_size:]
def render(self):
if not self.memory_snapshots:
return []
max_rss = max(self.memory_snapshots)
min_rss = min(self.memory_snapshots)
sum_rss = sum(self.memory_snapshots)
len_rss = len(self.memory_snapshots)
return [
"process_psutil_rss:max %d" % max_rss,
"process_psutil_rss:min %d" % min_rss,
"process_psutil_rss:total %d" % sum_rss,
"process_psutil_rss:count %d" % len_rss,
]
| apache-2.0 | 8,878,155,030,509,417,000 | 30.148718 | 86 | 0.607837 | false | 3.765654 | false | false | false |
pulkitag/mujoco140-py | mujoco_py/mjtypes.py | 1 | 238632 |
# AUTO GENERATED. DO NOT CHANGE!
from ctypes import *
import numpy as np
class MJCONTACT(Structure):
_fields_ = [
("dist", c_double),
("pos", c_double * 3),
("frame", c_double * 9),
("includemargin", c_double),
("friction", c_double * 5),
("solref", c_double * 2),
("solimp", c_double * 3),
("mu", c_double),
("coef", c_double * 5),
("zone", c_int),
("dim", c_int),
("geom1", c_int),
("geom2", c_int),
("exclude", c_int),
("efc_address", c_int),
]
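# Illustrative sketch of how these ctypes mirrors are typically consumed
# (hypothetical instance; field names follow the struct definition above):
#
#   con = MJCONTACT()
#   con.dist        # plain c_double fields read back as Python floats (0.0 here)
#   list(con.pos)   # fixed-size arrays are indexable -> [0.0, 0.0, 0.0]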
class MJRRECT(Structure):
_fields_ = [
("left", c_int),
("bottom", c_int),
("width", c_int),
("height", c_int),
]
class MJVGEOM(Structure):
_fields_ = [
("type", c_int),
("dataid", c_int),
("objtype", c_int),
("objid", c_int),
("category", c_int),
("texid", c_int),
("texuniform", c_int),
("texrepeat", c_float * 2),
("size", c_float * 3),
("pos", c_float * 3),
("mat", c_float * 9),
("rgba", c_float * 4),
("emission", c_float),
("specular", c_float),
("shininess", c_float),
("reflectance", c_float),
("label", c_char * 100),
("camdist", c_float),
("rbound", c_float),
("transparent", c_ubyte),
]
class MJVSCENE(Structure):
_fields_ = [
("maxgeom", c_int),
("ngeom", c_int),
("geoms", POINTER(MJVGEOM)),
("geomorder", POINTER(c_int)),
("nlight", c_int),
("lights", MJVLIGHT * 8),
("camera", MJVGLCAMERA * 2),
("enabletransform", c_ubyte),
("translate", c_float * 3),
("rotate", c_float * 4),
("scale", c_float),
("stereo", c_int),
("flags", c_ubyte * 5),
]
class MJVPERTURB(Structure):
_fields_ = [
("select", c_int),
("active", c_int),
("refpos", c_double * 3),
("refquat", c_double * 4),
("localpos", c_double * 3),
("scale", c_double),
]
class MJRCONTEXT(Structure):
_fields_ = [
("lineWidth", c_float),
("shadowClip", c_float),
("shadowScale", c_float),
("shadowSize", c_int),
("offWidth", c_int),
("offHeight", c_int),
("offSamples", c_int),
("offFBO", c_uint),
("offFBO_r", c_uint),
("offColor", c_uint),
("offColor_r", c_uint),
("offDepthStencil", c_uint),
("offDepthStencil_r", c_uint),
("shadowFBO", c_uint),
("shadowTex", c_uint),
("ntexture", c_int),
("textureType", c_int * 100),
("texture", c_int * 100),
("basePlane", c_uint),
("baseMesh", c_uint),
("baseHField", c_uint),
("baseBuiltin", c_uint),
("baseFontNormal", c_uint),
("baseFontShadow", c_uint),
("baseFontBig", c_uint),
("rangePlane", c_int),
("rangeMesh", c_int),
("rangeHField", c_int),
("rangeBuiltin", c_int),
("rangeFont", c_int),
("charWidth", c_int * 127),
("charWidthBig", c_int * 127),
("charHeight", c_int),
("charHeightBig", c_int),
("glewInitialized", c_int),
("windowAvailable", c_int),
("windowSamples", c_int),
("windowStereo", c_int),
("windowDoublebuffer", c_int),
("currentBuffer", c_int),
]
class MJVCAMERA(Structure):
_fields_ = [
("type", c_int),
("fixedcamid", c_int),
("trackbodyid", c_int),
("lookat", c_double * 3),
("distance", c_double),
("azimuth", c_double),
("elevation", c_double),
]
class MJVOPTION(Structure):
_fields_ = [
("label", c_int),
("frame", c_int),
("geomgroup", c_ubyte * 5),
("sitegroup", c_ubyte * 5),
("flags", c_ubyte * 18),
]
class MJVGEOM(Structure):
_fields_ = [
("type", c_int),
("dataid", c_int),
("objtype", c_int),
("objid", c_int),
("category", c_int),
("texid", c_int),
("texuniform", c_int),
("texrepeat", c_float * 2),
("size", c_float * 3),
("pos", c_float * 3),
("mat", c_float * 9),
("rgba", c_float * 4),
("emission", c_float),
("specular", c_float),
("shininess", c_float),
("reflectance", c_float),
("label", c_char * 100),
("camdist", c_float),
("rbound", c_float),
("transparent", c_ubyte),
]
class MJVLIGHT(Structure):
_fields_ = [
("pos", c_float * 3),
("dir", c_float * 3),
("attenuation", c_float * 3),
("cutoff", c_float),
("exponent", c_float),
("ambient", c_float * 3),
("diffuse", c_float * 3),
("specular", c_float * 3),
("headlight", c_ubyte),
("directional", c_ubyte),
("castshadow", c_ubyte),
]
class MJOPTION(Structure):
_fields_ = [
("timestep", c_double),
("apirate", c_double),
("tolerance", c_double),
("impratio", c_double),
("gravity", c_double * 3),
("wind", c_double * 3),
("magnetic", c_double * 3),
("density", c_double),
("viscosity", c_double),
("o_margin", c_double),
("o_solref", c_double * 2),
("o_solimp", c_double * 3),
("mpr_tolerance", c_double),
("mpr_iterations", c_int),
("integrator", c_int),
("collision", c_int),
("impedance", c_int),
("reference", c_int),
("solver", c_int),
("iterations", c_int),
("disableflags", c_int),
("enableflags", c_int),
]
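# Illustrative sketch (hypothetical helper): filling scalar and fixed-size array fields
# of the raw MJOPTION struct above. The numbers are example values for illustration only.
def _example_option_struct():
    opt = MJOPTION()
    opt.timestep = 0.002
    opt.gravity[2] = -9.81        # gravity is a c_double * 3 array
    return opt.timestep, list(opt.gravity)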
class MJVISUAL(Structure):
class ANON_GLOBAL(Structure):
_fields_ = [
("fovy", c_float),
("ipd", c_float),
("linewidth", c_float),
("glow", c_float),
("offwidth", c_int),
("offheight", c_int),
]
class ANON_QUALITY(Structure):
_fields_ = [
("shadowsize", c_int),
("offsamples", c_int),
("numslices", c_int),
("numstacks", c_int),
("numarrows", c_int),
("numquads", c_int),
]
class ANON_HEADLIGHT(Structure):
_fields_ = [
("ambient", c_float * 3),
("diffuse", c_float * 3),
("specular", c_float * 3),
("active", c_int),
]
class ANON_MAP(Structure):
_fields_ = [
("stiffness", c_float),
("stiffnessrot", c_float),
("force", c_float),
("torque", c_float),
("alpha", c_float),
("fogstart", c_float),
("fogend", c_float),
("znear", c_float),
("zfar", c_float),
("shadowclip", c_float),
("shadowscale", c_float),
]
class ANON_SCALE(Structure):
_fields_ = [
("forcewidth", c_float),
("contactwidth", c_float),
("contactheight", c_float),
("connect", c_float),
("com", c_float),
("camera", c_float),
("light", c_float),
("selectpoint", c_float),
("jointlength", c_float),
("jointwidth", c_float),
("actuatorlength", c_float),
("actuatorwidth", c_float),
("framelength", c_float),
("framewidth", c_float),
("constraint", c_float),
("slidercrank", c_float),
]
class ANON_RGBA(Structure):
_fields_ = [
("fog", c_float * 4),
("force", c_float * 4),
("inertia", c_float * 4),
("joint", c_float * 4),
("actuator", c_float * 4),
("com", c_float * 4),
("camera", c_float * 4),
("light", c_float * 4),
("selectpoint", c_float * 4),
("connect", c_float * 4),
("contactpoint", c_float * 4),
("contactforce", c_float * 4),
("contactfriction", c_float * 4),
("contacttorque", c_float * 4),
("constraint", c_float * 4),
("slidercrank", c_float * 4),
("crankbroken", c_float * 4),
]
_fields_ = [
("global_", ANON_GLOBAL),
("quality", ANON_QUALITY),
("headlight", ANON_HEADLIGHT),
("map_", ANON_MAP),
("scale", ANON_SCALE),
("rgba", ANON_RGBA),
]
class MJSTATISTIC(Structure):
_fields_ = [
("meanmass", c_double),
("meansize", c_double),
("extent", c_double),
("center", c_double * 3),
]
class MJDATA(Structure):
_fields_ = [
("nstack", c_int),
("nbuffer", c_int),
("pstack", c_int),
("maxuse_stack", c_int),
("maxuse_con", c_int),
("maxuse_efc", c_int),
("nwarning", c_int * 8),
("warning_info", c_int * 8),
("timer_ncall", c_int * 13),
("timer_duration", c_double * 13),
("solver_iter", c_int),
("solver_trace", c_double * 200),
("solver_fwdinv", c_double * 2),
("ne", c_int),
("nf", c_int),
("nefc", c_int),
("ncon", c_int),
("time", c_double),
("energy", c_double * 2),
("buffer", POINTER(c_ubyte)),
("stack", POINTER(c_double)),
("qpos", POINTER(c_double)),
("qvel", POINTER(c_double)),
("act", POINTER(c_double)),
("ctrl", POINTER(c_double)),
("qfrc_applied", POINTER(c_double)),
("xfrc_applied", POINTER(c_double)),
("qacc", POINTER(c_double)),
("act_dot", POINTER(c_double)),
("mocap_pos", POINTER(c_double)),
("mocap_quat", POINTER(c_double)),
("userdata", POINTER(c_double)),
("sensordata", POINTER(c_double)),
("xpos", POINTER(c_double)),
("xquat", POINTER(c_double)),
("xmat", POINTER(c_double)),
("xipos", POINTER(c_double)),
("ximat", POINTER(c_double)),
("xanchor", POINTER(c_double)),
("xaxis", POINTER(c_double)),
("geom_xpos", POINTER(c_double)),
("geom_xmat", POINTER(c_double)),
("site_xpos", POINTER(c_double)),
("site_xmat", POINTER(c_double)),
("cam_xpos", POINTER(c_double)),
("cam_xmat", POINTER(c_double)),
("light_xpos", POINTER(c_double)),
("light_xdir", POINTER(c_double)),
("subtree_com", POINTER(c_double)),
("cdof", POINTER(c_double)),
("cinert", POINTER(c_double)),
("ten_wrapadr", POINTER(c_int)),
("ten_wrapnum", POINTER(c_int)),
("ten_length", POINTER(c_double)),
("ten_moment", POINTER(c_double)),
("wrap_obj", POINTER(c_int)),
("wrap_xpos", POINTER(c_double)),
("actuator_length", POINTER(c_double)),
("actuator_moment", POINTER(c_double)),
("crb", POINTER(c_double)),
("qM", POINTER(c_double)),
("qLD", POINTER(c_double)),
("qLDiagInv", POINTER(c_double)),
("qLDiagSqrtInv", POINTER(c_double)),
("contact", POINTER(MJCONTACT)),
("efc_type", POINTER(c_int)),
("efc_id", POINTER(c_int)),
("efc_rownnz", POINTER(c_int)),
("efc_rowadr", POINTER(c_int)),
("efc_colind", POINTER(c_int)),
("efc_rownnz_T", POINTER(c_int)),
("efc_rowadr_T", POINTER(c_int)),
("efc_colind_T", POINTER(c_int)),
("efc_solref", POINTER(c_double)),
("efc_solimp", POINTER(c_double)),
("efc_margin", POINTER(c_double)),
("efc_frictionloss", POINTER(c_double)),
("efc_pos", POINTER(c_double)),
("efc_J", POINTER(c_double)),
("efc_J_T", POINTER(c_double)),
("efc_diagApprox", POINTER(c_double)),
("efc_D", POINTER(c_double)),
("efc_R", POINTER(c_double)),
("efc_AR", POINTER(c_double)),
("e_ARchol", POINTER(c_double)),
("fc_e_rect", POINTER(c_double)),
("fc_AR", POINTER(c_double)),
("ten_velocity", POINTER(c_double)),
("actuator_velocity", POINTER(c_double)),
("cvel", POINTER(c_double)),
("cdof_dot", POINTER(c_double)),
("qfrc_bias", POINTER(c_double)),
("qfrc_passive", POINTER(c_double)),
("efc_vel", POINTER(c_double)),
("efc_aref", POINTER(c_double)),
("subtree_linvel", POINTER(c_double)),
("subtree_angmom", POINTER(c_double)),
("actuator_force", POINTER(c_double)),
("qfrc_actuator", POINTER(c_double)),
("qfrc_unc", POINTER(c_double)),
("qacc_unc", POINTER(c_double)),
("efc_b", POINTER(c_double)),
("fc_b", POINTER(c_double)),
("efc_force", POINTER(c_double)),
("qfrc_constraint", POINTER(c_double)),
("qfrc_inverse", POINTER(c_double)),
("cacc", POINTER(c_double)),
("cfrc_int", POINTER(c_double)),
("cfrc_ext", POINTER(c_double)),
]
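# Illustrative sketch (hypothetical helper): MJDATA exposes its large buffers as raw
# POINTER(...) members whose lengths come from the model counts; the wrapper classes
# further below turn them into numpy arrays with np.fromiter. The same pattern is shown
# here on a locally allocated c_double array instead of a real mjData buffer.
def _example_pointer_to_numpy():
    raw = (c_double * 3)(1.0, 2.0, 3.0)          # stand-in for e.g. qpos with nq == 3
    ptr = cast(raw, POINTER(c_double))
    arr = np.fromiter(ptr, dtype=np.double, count=3)
    arr.setflags(write=False)                    # the wrappers return read-only views
    return arr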
class MJMODEL(Structure):
_fields_ = [
("nq", c_int),
("nv", c_int),
("nu", c_int),
("na", c_int),
("nbody", c_int),
("njnt", c_int),
("ngeom", c_int),
("nsite", c_int),
("ncam", c_int),
("nlight", c_int),
("nmesh", c_int),
("nmeshvert", c_int),
("nmeshface", c_int),
("nmeshgraph", c_int),
("nhfield", c_int),
("nhfielddata", c_int),
("ntex", c_int),
("ntexdata", c_int),
("nmat", c_int),
("npair", c_int),
("nexclude", c_int),
("neq", c_int),
("ntendon", c_int),
("nwrap", c_int),
("nsensor", c_int),
("nnumeric", c_int),
("nnumericdata", c_int),
("ntext", c_int),
("ntextdata", c_int),
("ntuple", c_int),
("ntupledata", c_int),
("nkey", c_int),
("nuser_body", c_int),
("nuser_jnt", c_int),
("nuser_geom", c_int),
("nuser_site", c_int),
("nuser_tendon", c_int),
("nuser_actuator", c_int),
("nuser_sensor", c_int),
("nnames", c_int),
("nM", c_int),
("nemax", c_int),
("njmax", c_int),
("nconmax", c_int),
("nstack", c_int),
("nuserdata", c_int),
("nmocap", c_int),
("nsensordata", c_int),
("nbuffer", c_int),
("opt", MJOPTION),
("vis", MJVISUAL),
("stat", MJSTATISTIC),
("buffer", POINTER(c_ubyte)),
("qpos0", POINTER(c_double)),
("qpos_spring", POINTER(c_double)),
("body_parentid", POINTER(c_int)),
("body_rootid", POINTER(c_int)),
("body_weldid", POINTER(c_int)),
("body_mocapid", POINTER(c_int)),
("body_jntnum", POINTER(c_int)),
("body_jntadr", POINTER(c_int)),
("body_dofnum", POINTER(c_int)),
("body_dofadr", POINTER(c_int)),
("body_geomnum", POINTER(c_int)),
("body_geomadr", POINTER(c_int)),
("body_pos", POINTER(c_double)),
("body_quat", POINTER(c_double)),
("body_ipos", POINTER(c_double)),
("body_iquat", POINTER(c_double)),
("body_mass", POINTER(c_double)),
("body_subtreemass", POINTER(c_double)),
("body_inertia", POINTER(c_double)),
("body_invweight0", POINTER(c_double)),
("body_user", POINTER(c_double)),
("jnt_type", POINTER(c_int)),
("jnt_qposadr", POINTER(c_int)),
("jnt_dofadr", POINTER(c_int)),
("jnt_bodyid", POINTER(c_int)),
("jnt_limited", POINTER(c_ubyte)),
("jnt_solref", POINTER(c_double)),
("jnt_solimp", POINTER(c_double)),
("jnt_pos", POINTER(c_double)),
("jnt_axis", POINTER(c_double)),
("jnt_stiffness", POINTER(c_double)),
("jnt_range", POINTER(c_double)),
("jnt_margin", POINTER(c_double)),
("jnt_user", POINTER(c_double)),
("dof_bodyid", POINTER(c_int)),
("dof_jntid", POINTER(c_int)),
("dof_parentid", POINTER(c_int)),
("dof_Madr", POINTER(c_int)),
("dof_frictional", POINTER(c_ubyte)),
("dof_solref", POINTER(c_double)),
("dof_solimp", POINTER(c_double)),
("dof_frictionloss", POINTER(c_double)),
("dof_armature", POINTER(c_double)),
("dof_damping", POINTER(c_double)),
("dof_invweight0", POINTER(c_double)),
("geom_type", POINTER(c_int)),
("geom_contype", POINTER(c_int)),
("geom_conaffinity", POINTER(c_int)),
("geom_condim", POINTER(c_int)),
("geom_bodyid", POINTER(c_int)),
("geom_dataid", POINTER(c_int)),
("geom_matid", POINTER(c_int)),
("geom_group", POINTER(c_int)),
("geom_solmix", POINTER(c_double)),
("geom_solref", POINTER(c_double)),
("geom_solimp", POINTER(c_double)),
("geom_size", POINTER(c_double)),
("geom_rbound", POINTER(c_double)),
("geom_pos", POINTER(c_double)),
("geom_quat", POINTER(c_double)),
("geom_friction", POINTER(c_double)),
("geom_margin", POINTER(c_double)),
("geom_gap", POINTER(c_double)),
("geom_user", POINTER(c_double)),
("geom_rgba", POINTER(c_float)),
("site_type", POINTER(c_int)),
("site_bodyid", POINTER(c_int)),
("site_matid", POINTER(c_int)),
("site_group", POINTER(c_int)),
("site_size", POINTER(c_double)),
("site_pos", POINTER(c_double)),
("site_quat", POINTER(c_double)),
("site_user", POINTER(c_double)),
("site_rgba", POINTER(c_float)),
("cam_mode", POINTER(c_int)),
("cam_bodyid", POINTER(c_int)),
("cam_targetbodyid", POINTER(c_int)),
("cam_pos", POINTER(c_double)),
("cam_quat", POINTER(c_double)),
("cam_poscom0", POINTER(c_double)),
("cam_pos0", POINTER(c_double)),
("cam_mat0", POINTER(c_double)),
("cam_fovy", POINTER(c_double)),
("cam_ipd", POINTER(c_double)),
("light_mode", POINTER(c_int)),
("light_bodyid", POINTER(c_int)),
("light_targetbodyid", POINTER(c_int)),
("light_directional", POINTER(c_ubyte)),
("light_castshadow", POINTER(c_ubyte)),
("light_active", POINTER(c_ubyte)),
("light_pos", POINTER(c_double)),
("light_dir", POINTER(c_double)),
("light_poscom0", POINTER(c_double)),
("light_pos0", POINTER(c_double)),
("light_dir0", POINTER(c_double)),
("light_attenuation", POINTER(c_float)),
("light_cutoff", POINTER(c_float)),
("light_exponent", POINTER(c_float)),
("light_ambient", POINTER(c_float)),
("light_diffuse", POINTER(c_float)),
("light_specular", POINTER(c_float)),
("mesh_faceadr", POINTER(c_int)),
("mesh_facenum", POINTER(c_int)),
("mesh_vertadr", POINTER(c_int)),
("mesh_vertnum", POINTER(c_int)),
("mesh_graphadr", POINTER(c_int)),
("mesh_vert", POINTER(c_float)),
("mesh_normal", POINTER(c_float)),
("mesh_face", POINTER(c_int)),
("mesh_graph", POINTER(c_int)),
("hfield_size", POINTER(c_double)),
("hfield_nrow", POINTER(c_int)),
("hfield_ncol", POINTER(c_int)),
("hfield_adr", POINTER(c_int)),
("hfield_data", POINTER(c_float)),
("tex_type", POINTER(c_int)),
("tex_height", POINTER(c_int)),
("tex_width", POINTER(c_int)),
("tex_adr", POINTER(c_int)),
("tex_rgb", POINTER(c_ubyte)),
("mat_texid", POINTER(c_int)),
("mat_texuniform", POINTER(c_ubyte)),
("mat_texrepeat", POINTER(c_float)),
("mat_emission", POINTER(c_float)),
("mat_specular", POINTER(c_float)),
("mat_shininess", POINTER(c_float)),
("mat_reflectance", POINTER(c_float)),
("mat_rgba", POINTER(c_float)),
("pair_dim", POINTER(c_int)),
("pair_geom1", POINTER(c_int)),
("pair_geom2", POINTER(c_int)),
("pair_signature", POINTER(c_int)),
("pair_solref", POINTER(c_double)),
("pair_solimp", POINTER(c_double)),
("pair_margin", POINTER(c_double)),
("pair_gap", POINTER(c_double)),
("pair_friction", POINTER(c_double)),
("exclude_signature", POINTER(c_int)),
("eq_type", POINTER(c_int)),
("eq_obj1id", POINTER(c_int)),
("eq_obj2id", POINTER(c_int)),
("eq_active", POINTER(c_ubyte)),
("eq_solref", POINTER(c_double)),
("eq_solimp", POINTER(c_double)),
("eq_data", POINTER(c_double)),
("tendon_adr", POINTER(c_int)),
("tendon_num", POINTER(c_int)),
("tendon_matid", POINTER(c_int)),
("tendon_limited", POINTER(c_ubyte)),
("tendon_frictional", POINTER(c_ubyte)),
("tendon_width", POINTER(c_double)),
("tendon_solref_lim", POINTER(c_double)),
("tendon_solimp_lim", POINTER(c_double)),
("tendon_solref_fri", POINTER(c_double)),
("tendon_solimp_fri", POINTER(c_double)),
("tendon_range", POINTER(c_double)),
("tendon_margin", POINTER(c_double)),
("tendon_stiffness", POINTER(c_double)),
("tendon_damping", POINTER(c_double)),
("tendon_frictionloss", POINTER(c_double)),
("tendon_lengthspring", POINTER(c_double)),
("tendon_length0", POINTER(c_double)),
("tendon_invweight0", POINTER(c_double)),
("tendon_user", POINTER(c_double)),
("tendon_rgba", POINTER(c_float)),
("wrap_type", POINTER(c_int)),
("wrap_objid", POINTER(c_int)),
("wrap_prm", POINTER(c_double)),
("actuator_trntype", POINTER(c_int)),
("actuator_dyntype", POINTER(c_int)),
("actuator_gaintype", POINTER(c_int)),
("actuator_biastype", POINTER(c_int)),
("actuator_trnid", POINTER(c_int)),
("actuator_ctrllimited", POINTER(c_ubyte)),
("actuator_forcelimited", POINTER(c_ubyte)),
("actuator_dynprm", POINTER(c_double)),
("actuator_gainprm", POINTER(c_double)),
("actuator_biasprm", POINTER(c_double)),
("actuator_ctrlrange", POINTER(c_double)),
("actuator_forcerange", POINTER(c_double)),
("actuator_gear", POINTER(c_double)),
("actuator_cranklength", POINTER(c_double)),
("actuator_invweight0", POINTER(c_double)),
("actuator_length0", POINTER(c_double)),
("actuator_lengthrange", POINTER(c_double)),
("actuator_user", POINTER(c_double)),
("sensor_type", POINTER(c_int)),
("sensor_datatype", POINTER(c_int)),
("sensor_needstage", POINTER(c_int)),
("sensor_objtype", POINTER(c_int)),
("sensor_objid", POINTER(c_int)),
("sensor_dim", POINTER(c_int)),
("sensor_adr", POINTER(c_int)),
("sensor_noise", POINTER(c_double)),
("sensor_user", POINTER(c_double)),
("numeric_adr", POINTER(c_int)),
("numeric_size", POINTER(c_int)),
("numeric_data", POINTER(c_double)),
("text_adr", POINTER(c_int)),
("text_size", POINTER(c_int)),
("text_data", POINTER(c_char)),
("tuple_adr", POINTER(c_int)),
("tuple_size", POINTER(c_int)),
("tuple_objtype", POINTER(c_int)),
("tuple_objid", POINTER(c_int)),
("tuple_objprm", POINTER(c_double)),
("key_time", POINTER(c_double)),
("key_qpos", POINTER(c_double)),
("key_qvel", POINTER(c_double)),
("key_act", POINTER(c_double)),
("name_bodyadr", POINTER(c_int)),
("name_jntadr", POINTER(c_int)),
("name_geomadr", POINTER(c_int)),
("name_siteadr", POINTER(c_int)),
("name_camadr", POINTER(c_int)),
("name_lightadr", POINTER(c_int)),
("name_meshadr", POINTER(c_int)),
("name_hfieldadr", POINTER(c_int)),
("name_texadr", POINTER(c_int)),
("name_matadr", POINTER(c_int)),
("name_eqadr", POINTER(c_int)),
("name_tendonadr", POINTER(c_int)),
("name_actuatoradr", POINTER(c_int)),
("name_sensoradr", POINTER(c_int)),
("name_numericadr", POINTER(c_int)),
("name_textadr", POINTER(c_int)),
("name_tupleadr", POINTER(c_int)),
("names", POINTER(c_char)),
]
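# Illustrative sketch (hypothetical helper): MJMODEL embeds MJOPTION / MJVISUAL /
# MJSTATISTIC by value, so their scalar sub-fields can be set directly on a bare struct;
# the POINTER(...) members stay null here because no MuJoCo buffers are attached. The
# count fields (nq, nv, ...) are what the wrappers below use to size their numpy views.
def _example_model_struct():
    m = MJMODEL()
    m.nq, m.nv = 7, 6
    m.opt.timestep = 0.002
    m.stat.extent = 1.0
    return m.nq, m.opt.timestep, m.stat.extent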
class MjContactWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def dist(self):
return self._wrapped.contents.dist
@dist.setter
def dist(self, value):
self._wrapped.contents.dist = value
@property
def pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@pos.setter
def pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_double))
@property
def frame(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.frame, dtype=np.double, count=(9)), (9, ))
arr.setflags(write=False)
return arr
@frame.setter
def frame(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.frame, val_ptr, 9 * sizeof(c_double))
@property
def includemargin(self):
return self._wrapped.contents.includemargin
@includemargin.setter
def includemargin(self, value):
self._wrapped.contents.includemargin = value
@property
def friction(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.friction, dtype=np.double, count=(5)), (5, ))
arr.setflags(write=False)
return arr
@friction.setter
def friction(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.friction, val_ptr, 5 * sizeof(c_double))
@property
def solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.solref, dtype=np.double, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@solref.setter
def solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.solref, val_ptr, 2 * sizeof(c_double))
@property
def solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.solimp, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@solimp.setter
def solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.solimp, val_ptr, 3 * sizeof(c_double))
@property
def mu(self):
return self._wrapped.contents.mu
@mu.setter
def mu(self, value):
self._wrapped.contents.mu = value
@property
def coef(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.coef, dtype=np.double, count=(5)), (5, ))
arr.setflags(write=False)
return arr
@coef.setter
def coef(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.coef, val_ptr, 5 * sizeof(c_double))
@property
def zone(self):
return self._wrapped.contents.zone
@zone.setter
def zone(self, value):
self._wrapped.contents.zone = value
@property
def dim(self):
return self._wrapped.contents.dim
@dim.setter
def dim(self, value):
self._wrapped.contents.dim = value
@property
def geom1(self):
return self._wrapped.contents.geom1
@geom1.setter
def geom1(self, value):
self._wrapped.contents.geom1 = value
@property
def geom2(self):
return self._wrapped.contents.geom2
@geom2.setter
def geom2(self, value):
self._wrapped.contents.geom2 = value
@property
def exclude(self):
return self._wrapped.contents.exclude
@exclude.setter
def exclude(self, value):
self._wrapped.contents.exclude = value
@property
def efc_address(self):
return self._wrapped.contents.efc_address
@efc_address.setter
def efc_address(self, value):
self._wrapped.contents.efc_address = value
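# Illustrative sketch (hypothetical helper) of the wrapper just defined. Array
# properties come back as read-only numpy views built with np.fromiter, and assigning
# to them copies the new values into the struct with memmove; scalar properties read
# and write the underlying ctypes fields directly.
def _example_contact_wrapper():
    wrapper = MjContactWrapper(pointer(MJCONTACT()))
    wrapper.dist = 0.001                  # scalar, stored straight into the struct
    wrapper.pos = [0.0, 0.0, 0.1]         # copied into the c_double * 3 field
    pos_view = wrapper.pos                # (3,) numpy array, write-protected
    return wrapper.dist, pos_view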
class MjrRectWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def left(self):
return self._wrapped.contents.left
@left.setter
def left(self, value):
self._wrapped.contents.left = value
@property
def bottom(self):
return self._wrapped.contents.bottom
@bottom.setter
def bottom(self, value):
self._wrapped.contents.bottom = value
@property
def width(self):
return self._wrapped.contents.width
@width.setter
def width(self, value):
self._wrapped.contents.width = value
@property
def height(self):
return self._wrapped.contents.height
@height.setter
def height(self, value):
self._wrapped.contents.height = value
class MjvGeomWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def type(self):
return self._wrapped.contents.type
@type.setter
def type(self, value):
self._wrapped.contents.type = value
@property
def dataid(self):
return self._wrapped.contents.dataid
@dataid.setter
def dataid(self, value):
self._wrapped.contents.dataid = value
@property
def objtype(self):
return self._wrapped.contents.objtype
@objtype.setter
def objtype(self, value):
self._wrapped.contents.objtype = value
@property
def objid(self):
return self._wrapped.contents.objid
@objid.setter
def objid(self, value):
self._wrapped.contents.objid = value
@property
def category(self):
return self._wrapped.contents.category
@category.setter
def category(self, value):
self._wrapped.contents.category = value
@property
def texid(self):
return self._wrapped.contents.texid
@texid.setter
def texid(self, value):
self._wrapped.contents.texid = value
@property
def texuniform(self):
return self._wrapped.contents.texuniform
@texuniform.setter
def texuniform(self, value):
self._wrapped.contents.texuniform = value
@property
def texrepeat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.texrepeat, dtype=np.float, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@texrepeat.setter
def texrepeat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.texrepeat, val_ptr, 2 * sizeof(c_float))
@property
def size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.size, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@size.setter
def size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.size, val_ptr, 3 * sizeof(c_float))
@property
def pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@pos.setter
def pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float))
@property
def mat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat, dtype=np.float, count=(9)), (9, ))
arr.setflags(write=False)
return arr
@mat.setter
def mat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat, val_ptr, 9 * sizeof(c_float))
@property
def rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.rgba, dtype=np.float, count=(4)), (4, ))
arr.setflags(write=False)
return arr
@rgba.setter
def rgba(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.rgba, val_ptr, 4 * sizeof(c_float))
@property
def emission(self):
return self._wrapped.contents.emission
@emission.setter
def emission(self, value):
self._wrapped.contents.emission = value
@property
def specular(self):
return self._wrapped.contents.specular
@specular.setter
def specular(self, value):
self._wrapped.contents.specular = value
@property
def shininess(self):
return self._wrapped.contents.shininess
@shininess.setter
def shininess(self, value):
self._wrapped.contents.shininess = value
@property
def reflectance(self):
return self._wrapped.contents.reflectance
@reflectance.setter
def reflectance(self, value):
self._wrapped.contents.reflectance = value
@property
def label(self):
return self._wrapped.contents.label
@label.setter
def label(self, value):
self._wrapped.contents.label = value
@property
def camdist(self):
return self._wrapped.contents.camdist
@camdist.setter
def camdist(self, value):
self._wrapped.contents.camdist = value
@property
def rbound(self):
return self._wrapped.contents.rbound
@rbound.setter
def rbound(self, value):
self._wrapped.contents.rbound = value
@property
def transparent(self):
return self._wrapped.contents.transparent
@transparent.setter
def transparent(self, value):
self._wrapped.contents.transparent = value
class MjvSceneWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def maxgeom(self):
return self._wrapped.contents.maxgeom
@maxgeom.setter
def maxgeom(self, value):
self._wrapped.contents.maxgeom = value
@property
def ngeom(self):
return self._wrapped.contents.ngeom
@ngeom.setter
def ngeom(self, value):
self._wrapped.contents.ngeom = value
@property
def nlight(self):
return self._wrapped.contents.nlight
@nlight.setter
def nlight(self, value):
self._wrapped.contents.nlight = value
@property
def lights(self):
return self._wrapped.contents.lights
@lights.setter
def lights(self, value):
self._wrapped.contents.lights = value
@property
def camera(self):
return self._wrapped.contents.camera
@camera.setter
def camera(self, value):
self._wrapped.contents.camera = value
@property
def enabletransform(self):
return self._wrapped.contents.enabletransform
@enabletransform.setter
def enabletransform(self, value):
self._wrapped.contents.enabletransform = value
@property
def translate(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.translate, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@translate.setter
def translate(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.translate, val_ptr, 3 * sizeof(c_float))
@property
def rotate(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.rotate, dtype=np.float, count=(4)), (4, ))
arr.setflags(write=False)
return arr
@rotate.setter
def rotate(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.rotate, val_ptr, 4 * sizeof(c_float))
@property
def scale(self):
return self._wrapped.contents.scale
@scale.setter
def scale(self, value):
self._wrapped.contents.scale = value
@property
def stereo(self):
return self._wrapped.contents.stereo
@stereo.setter
def stereo(self, value):
self._wrapped.contents.stereo = value
@property
def flags(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.flags, dtype=np.uint8, count=(5)), (5, ))
arr.setflags(write=False)
return arr
@flags.setter
def flags(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.flags, val_ptr, 5 * sizeof(c_ubyte))
class MjvPerturbWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def select(self):
return self._wrapped.contents.select
@select.setter
def select(self, value):
self._wrapped.contents.select = value
@property
def active(self):
return self._wrapped.contents.active
@active.setter
def active(self, value):
self._wrapped.contents.active = value
@property
def refpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.refpos, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@refpos.setter
def refpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.refpos, val_ptr, 3 * sizeof(c_double))
@property
def refquat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.refquat, dtype=np.double, count=(4)), (4, ))
arr.setflags(write=False)
return arr
@refquat.setter
def refquat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.refquat, val_ptr, 4 * sizeof(c_double))
@property
def localpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.localpos, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@localpos.setter
def localpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.localpos, val_ptr, 3 * sizeof(c_double))
@property
def scale(self):
return self._wrapped.contents.scale
@scale.setter
def scale(self, value):
self._wrapped.contents.scale = value
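# Illustrative sketch (hypothetical helper): MjvPerturbWrapper follows the same
# conventions; refpos and refquat are c_double arrays, so assignment copies the values
# straight into the underlying struct.
def _example_perturb_wrapper():
    pert = MjvPerturbWrapper(pointer(MJVPERTURB()))
    pert.select = 1
    pert.refpos = [0.0, 0.0, 1.0]
    pert.refquat = [1.0, 0.0, 0.0, 0.0]   # length-4 c_double field
    return pert.select, pert.refquat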
class MjrContextWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def lineWidth(self):
return self._wrapped.contents.lineWidth
@lineWidth.setter
def lineWidth(self, value):
self._wrapped.contents.lineWidth = value
@property
def shadowClip(self):
return self._wrapped.contents.shadowClip
@shadowClip.setter
def shadowClip(self, value):
self._wrapped.contents.shadowClip = value
@property
def shadowScale(self):
return self._wrapped.contents.shadowScale
@shadowScale.setter
def shadowScale(self, value):
self._wrapped.contents.shadowScale = value
@property
def shadowSize(self):
return self._wrapped.contents.shadowSize
@shadowSize.setter
def shadowSize(self, value):
self._wrapped.contents.shadowSize = value
@property
def offWidth(self):
return self._wrapped.contents.offWidth
@offWidth.setter
def offWidth(self, value):
self._wrapped.contents.offWidth = value
@property
def offHeight(self):
return self._wrapped.contents.offHeight
@offHeight.setter
def offHeight(self, value):
self._wrapped.contents.offHeight = value
@property
def offSamples(self):
return self._wrapped.contents.offSamples
@offSamples.setter
def offSamples(self, value):
self._wrapped.contents.offSamples = value
@property
def offFBO(self):
return self._wrapped.contents.offFBO
@offFBO.setter
def offFBO(self, value):
self._wrapped.contents.offFBO = value
@property
def offFBO_r(self):
return self._wrapped.contents.offFBO_r
@offFBO_r.setter
def offFBO_r(self, value):
self._wrapped.contents.offFBO_r = value
@property
def offColor(self):
return self._wrapped.contents.offColor
@offColor.setter
def offColor(self, value):
self._wrapped.contents.offColor = value
@property
def offColor_r(self):
return self._wrapped.contents.offColor_r
@offColor_r.setter
def offColor_r(self, value):
self._wrapped.contents.offColor_r = value
@property
def offDepthStencil(self):
return self._wrapped.contents.offDepthStencil
@offDepthStencil.setter
def offDepthStencil(self, value):
self._wrapped.contents.offDepthStencil = value
@property
def offDepthStencil_r(self):
return self._wrapped.contents.offDepthStencil_r
@offDepthStencil_r.setter
def offDepthStencil_r(self, value):
self._wrapped.contents.offDepthStencil_r = value
@property
def shadowFBO(self):
return self._wrapped.contents.shadowFBO
@shadowFBO.setter
def shadowFBO(self, value):
self._wrapped.contents.shadowFBO = value
@property
def shadowTex(self):
return self._wrapped.contents.shadowTex
@shadowTex.setter
def shadowTex(self, value):
self._wrapped.contents.shadowTex = value
@property
def ntexture(self):
return self._wrapped.contents.ntexture
@ntexture.setter
def ntexture(self, value):
self._wrapped.contents.ntexture = value
@property
def textureType(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.textureType, dtype=np.int, count=(100)), (100, ))
arr.setflags(write=False)
return arr
@textureType.setter
def textureType(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.textureType, val_ptr, 100 * sizeof(c_int))
@property
def texture(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.texture, dtype=np.int, count=(100)), (100, ))
arr.setflags(write=False)
return arr
@texture.setter
def texture(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.texture, val_ptr, 100 * sizeof(c_int))
@property
def basePlane(self):
return self._wrapped.contents.basePlane
@basePlane.setter
def basePlane(self, value):
self._wrapped.contents.basePlane = value
@property
def baseMesh(self):
return self._wrapped.contents.baseMesh
@baseMesh.setter
def baseMesh(self, value):
self._wrapped.contents.baseMesh = value
@property
def baseHField(self):
return self._wrapped.contents.baseHField
@baseHField.setter
def baseHField(self, value):
self._wrapped.contents.baseHField = value
@property
def baseBuiltin(self):
return self._wrapped.contents.baseBuiltin
@baseBuiltin.setter
def baseBuiltin(self, value):
self._wrapped.contents.baseBuiltin = value
@property
def baseFontNormal(self):
return self._wrapped.contents.baseFontNormal
@baseFontNormal.setter
def baseFontNormal(self, value):
self._wrapped.contents.baseFontNormal = value
@property
def baseFontShadow(self):
return self._wrapped.contents.baseFontShadow
@baseFontShadow.setter
def baseFontShadow(self, value):
self._wrapped.contents.baseFontShadow = value
@property
def baseFontBig(self):
return self._wrapped.contents.baseFontBig
@baseFontBig.setter
def baseFontBig(self, value):
self._wrapped.contents.baseFontBig = value
@property
def rangePlane(self):
return self._wrapped.contents.rangePlane
@rangePlane.setter
def rangePlane(self, value):
self._wrapped.contents.rangePlane = value
@property
def rangeMesh(self):
return self._wrapped.contents.rangeMesh
@rangeMesh.setter
def rangeMesh(self, value):
self._wrapped.contents.rangeMesh = value
@property
def rangeHField(self):
return self._wrapped.contents.rangeHField
@rangeHField.setter
def rangeHField(self, value):
self._wrapped.contents.rangeHField = value
@property
def rangeBuiltin(self):
return self._wrapped.contents.rangeBuiltin
@rangeBuiltin.setter
def rangeBuiltin(self, value):
self._wrapped.contents.rangeBuiltin = value
@property
def rangeFont(self):
return self._wrapped.contents.rangeFont
@rangeFont.setter
def rangeFont(self, value):
self._wrapped.contents.rangeFont = value
@property
def charWidth(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.charWidth, dtype=np.int, count=(127)), (127, ))
arr.setflags(write=False)
return arr
@charWidth.setter
def charWidth(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.charWidth, val_ptr, 127 * sizeof(c_int))
@property
def charWidthBig(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.charWidthBig, dtype=np.int, count=(127)), (127, ))
arr.setflags(write=False)
return arr
@charWidthBig.setter
def charWidthBig(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.charWidthBig, val_ptr, 127 * sizeof(c_int))
@property
def charHeight(self):
return self._wrapped.contents.charHeight
@charHeight.setter
def charHeight(self, value):
self._wrapped.contents.charHeight = value
@property
def charHeightBig(self):
return self._wrapped.contents.charHeightBig
@charHeightBig.setter
def charHeightBig(self, value):
self._wrapped.contents.charHeightBig = value
@property
def glewInitialized(self):
return self._wrapped.contents.glewInitialized
@glewInitialized.setter
def glewInitialized(self, value):
self._wrapped.contents.glewInitialized = value
@property
def windowAvailable(self):
return self._wrapped.contents.windowAvailable
@windowAvailable.setter
def windowAvailable(self, value):
self._wrapped.contents.windowAvailable = value
@property
def windowSamples(self):
return self._wrapped.contents.windowSamples
@windowSamples.setter
def windowSamples(self, value):
self._wrapped.contents.windowSamples = value
@property
def windowStereo(self):
return self._wrapped.contents.windowStereo
@windowStereo.setter
def windowStereo(self, value):
self._wrapped.contents.windowStereo = value
@property
def windowDoublebuffer(self):
return self._wrapped.contents.windowDoublebuffer
@windowDoublebuffer.setter
def windowDoublebuffer(self, value):
self._wrapped.contents.windowDoublebuffer = value
@property
def currentBuffer(self):
return self._wrapped.contents.currentBuffer
@currentBuffer.setter
def currentBuffer(self, value):
self._wrapped.contents.currentBuffer = value
class MjvCameraWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def type(self):
return self._wrapped.contents.type
@type.setter
def type(self, value):
self._wrapped.contents.type = value
@property
def fixedcamid(self):
return self._wrapped.contents.fixedcamid
@fixedcamid.setter
def fixedcamid(self, value):
self._wrapped.contents.fixedcamid = value
@property
def trackbodyid(self):
return self._wrapped.contents.trackbodyid
@trackbodyid.setter
def trackbodyid(self, value):
self._wrapped.contents.trackbodyid = value
@property
def lookat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.lookat, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@lookat.setter
def lookat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.lookat, val_ptr, 3 * sizeof(c_double))
@property
def distance(self):
return self._wrapped.contents.distance
@distance.setter
def distance(self, value):
self._wrapped.contents.distance = value
@property
def azimuth(self):
return self._wrapped.contents.azimuth
@azimuth.setter
def azimuth(self, value):
self._wrapped.contents.azimuth = value
@property
def elevation(self):
return self._wrapped.contents.elevation
@elevation.setter
def elevation(self, value):
self._wrapped.contents.elevation = value
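# Illustrative sketch (hypothetical helper): driving the free-camera state through
# MjvCameraWrapper; the numbers are arbitrary. In a real program the wrapped MJVCAMERA
# instance is the one handed to MuJoCo's visualization calls.
def _example_camera_wrapper():
    cam = MjvCameraWrapper(pointer(MJVCAMERA()))
    cam.lookat = [0.0, 0.0, 0.5]          # c_double * 3, copied in with memmove
    cam.distance = 2.5
    cam.azimuth = 90.0
    cam.elevation = -20.0
    return cam.lookat, cam.distance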
class MjvOptionWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def label(self):
return self._wrapped.contents.label
@label.setter
def label(self, value):
self._wrapped.contents.label = value
@property
def frame(self):
return self._wrapped.contents.frame
@frame.setter
def frame(self, value):
self._wrapped.contents.frame = value
@property
def geomgroup(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geomgroup, dtype=np.uint8, count=(5)), (5, ))
arr.setflags(write=False)
return arr
@geomgroup.setter
def geomgroup(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.geomgroup, val_ptr, 5 * sizeof(c_ubyte))
@property
def sitegroup(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sitegroup, dtype=np.uint8, count=(5)), (5, ))
arr.setflags(write=False)
return arr
@sitegroup.setter
def sitegroup(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.sitegroup, val_ptr, 5 * sizeof(c_ubyte))
@property
def flags(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.flags, dtype=np.uint8, count=(18)), (18, ))
arr.setflags(write=False)
return arr
@flags.setter
def flags(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.flags, val_ptr, 18 * sizeof(c_ubyte))
class MjvGeomWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def type(self):
return self._wrapped.contents.type
@type.setter
def type(self, value):
self._wrapped.contents.type = value
@property
def dataid(self):
return self._wrapped.contents.dataid
@dataid.setter
def dataid(self, value):
self._wrapped.contents.dataid = value
@property
def objtype(self):
return self._wrapped.contents.objtype
@objtype.setter
def objtype(self, value):
self._wrapped.contents.objtype = value
@property
def objid(self):
return self._wrapped.contents.objid
@objid.setter
def objid(self, value):
self._wrapped.contents.objid = value
@property
def category(self):
return self._wrapped.contents.category
@category.setter
def category(self, value):
self._wrapped.contents.category = value
@property
def texid(self):
return self._wrapped.contents.texid
@texid.setter
def texid(self, value):
self._wrapped.contents.texid = value
@property
def texuniform(self):
return self._wrapped.contents.texuniform
@texuniform.setter
def texuniform(self, value):
self._wrapped.contents.texuniform = value
@property
def texrepeat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.texrepeat, dtype=np.float, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@texrepeat.setter
def texrepeat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.texrepeat, val_ptr, 2 * sizeof(c_float))
@property
def size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.size, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@size.setter
def size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.size, val_ptr, 3 * sizeof(c_float))
@property
def pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@pos.setter
def pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float))
@property
def mat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat, dtype=np.float, count=(9)), (9, ))
arr.setflags(write=False)
return arr
@mat.setter
def mat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat, val_ptr, 9 * sizeof(c_float))
@property
def rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.rgba, dtype=np.float, count=(4)), (4, ))
arr.setflags(write=False)
return arr
@rgba.setter
def rgba(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.rgba, val_ptr, 4 * sizeof(c_float))
@property
def emission(self):
return self._wrapped.contents.emission
@emission.setter
def emission(self, value):
self._wrapped.contents.emission = value
@property
def specular(self):
return self._wrapped.contents.specular
@specular.setter
def specular(self, value):
self._wrapped.contents.specular = value
@property
def shininess(self):
return self._wrapped.contents.shininess
@shininess.setter
def shininess(self, value):
self._wrapped.contents.shininess = value
@property
def reflectance(self):
return self._wrapped.contents.reflectance
@reflectance.setter
def reflectance(self, value):
self._wrapped.contents.reflectance = value
@property
def label(self):
return self._wrapped.contents.label
@label.setter
def label(self, value):
self._wrapped.contents.label = value
@property
def camdist(self):
return self._wrapped.contents.camdist
@camdist.setter
def camdist(self, value):
self._wrapped.contents.camdist = value
@property
def rbound(self):
return self._wrapped.contents.rbound
@rbound.setter
def rbound(self, value):
self._wrapped.contents.rbound = value
@property
def transparent(self):
return self._wrapped.contents.transparent
@transparent.setter
def transparent(self, value):
self._wrapped.contents.transparent = value
class MjvLightWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pos, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@pos.setter
def pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.pos, val_ptr, 3 * sizeof(c_float))
@property
def dir(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dir, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@dir.setter
def dir(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.dir, val_ptr, 3 * sizeof(c_float))
@property
def attenuation(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.attenuation, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@attenuation.setter
def attenuation(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.attenuation, val_ptr, 3 * sizeof(c_float))
@property
def cutoff(self):
return self._wrapped.contents.cutoff
@cutoff.setter
def cutoff(self, value):
self._wrapped.contents.cutoff = value
@property
def exponent(self):
return self._wrapped.contents.exponent
@exponent.setter
def exponent(self, value):
self._wrapped.contents.exponent = value
@property
def ambient(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ambient, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@ambient.setter
def ambient(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.ambient, val_ptr, 3 * sizeof(c_float))
@property
def diffuse(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.diffuse, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@diffuse.setter
def diffuse(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.diffuse, val_ptr, 3 * sizeof(c_float))
@property
def specular(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.specular, dtype=np.float, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@specular.setter
def specular(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.specular, val_ptr, 3 * sizeof(c_float))
@property
def headlight(self):
return self._wrapped.contents.headlight
@headlight.setter
def headlight(self, value):
self._wrapped.contents.headlight = value
@property
def directional(self):
return self._wrapped.contents.directional
@directional.setter
def directional(self, value):
self._wrapped.contents.directional = value
@property
def castshadow(self):
return self._wrapped.contents.castshadow
@castshadow.setter
def castshadow(self, value):
self._wrapped.contents.castshadow = value
class MjOptionWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def timestep(self):
return self._wrapped.contents.timestep
@timestep.setter
def timestep(self, value):
self._wrapped.contents.timestep = value
@property
def apirate(self):
return self._wrapped.contents.apirate
@apirate.setter
def apirate(self, value):
self._wrapped.contents.apirate = value
@property
def tolerance(self):
return self._wrapped.contents.tolerance
@tolerance.setter
def tolerance(self, value):
self._wrapped.contents.tolerance = value
@property
def impratio(self):
return self._wrapped.contents.impratio
@impratio.setter
def impratio(self, value):
self._wrapped.contents.impratio = value
@property
def gravity(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.gravity, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@gravity.setter
def gravity(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.gravity, val_ptr, 3 * sizeof(c_double))
@property
def wind(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wind, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@wind.setter
def wind(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.wind, val_ptr, 3 * sizeof(c_double))
@property
def magnetic(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.magnetic, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@magnetic.setter
def magnetic(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.magnetic, val_ptr, 3 * sizeof(c_double))
@property
def density(self):
return self._wrapped.contents.density
@density.setter
def density(self, value):
self._wrapped.contents.density = value
@property
def viscosity(self):
return self._wrapped.contents.viscosity
@viscosity.setter
def viscosity(self, value):
self._wrapped.contents.viscosity = value
@property
def o_margin(self):
return self._wrapped.contents.o_margin
@o_margin.setter
def o_margin(self, value):
self._wrapped.contents.o_margin = value
@property
def o_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.o_solref, dtype=np.double, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@o_solref.setter
def o_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.o_solref, val_ptr, 2 * sizeof(c_double))
@property
def o_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.o_solimp, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@o_solimp.setter
def o_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.o_solimp, val_ptr, 3 * sizeof(c_double))
@property
def mpr_tolerance(self):
return self._wrapped.contents.mpr_tolerance
@mpr_tolerance.setter
def mpr_tolerance(self, value):
self._wrapped.contents.mpr_tolerance = value
@property
def mpr_iterations(self):
return self._wrapped.contents.mpr_iterations
@mpr_iterations.setter
def mpr_iterations(self, value):
self._wrapped.contents.mpr_iterations = value
@property
def integrator(self):
return self._wrapped.contents.integrator
@integrator.setter
def integrator(self, value):
self._wrapped.contents.integrator = value
@property
def collision(self):
return self._wrapped.contents.collision
@collision.setter
def collision(self, value):
self._wrapped.contents.collision = value
@property
def impedance(self):
return self._wrapped.contents.impedance
@impedance.setter
def impedance(self, value):
self._wrapped.contents.impedance = value
@property
def reference(self):
return self._wrapped.contents.reference
@reference.setter
def reference(self, value):
self._wrapped.contents.reference = value
@property
def solver(self):
return self._wrapped.contents.solver
@solver.setter
def solver(self, value):
self._wrapped.contents.solver = value
@property
def iterations(self):
return self._wrapped.contents.iterations
@iterations.setter
def iterations(self, value):
self._wrapped.contents.iterations = value
@property
def disableflags(self):
return self._wrapped.contents.disableflags
@disableflags.setter
def disableflags(self, value):
self._wrapped.contents.disableflags = value
@property
def enableflags(self):
return self._wrapped.contents.enableflags
@enableflags.setter
def enableflags(self, value):
self._wrapped.contents.enableflags = value
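# Illustrative sketch (hypothetical helper): setting simulation options through
# MjOptionWrapper. gravity is a c_double array, so the float64 conversion in its setter
# matches the underlying storage exactly; timestep and iterations are plain scalars.
def _example_option_wrapper():
    opt = MjOptionWrapper(pointer(MJOPTION()))
    opt.timestep = 0.002
    opt.gravity = [0.0, 0.0, -9.81]
    opt.iterations = 50
    return opt.timestep, opt.gravity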
class MjVisualWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def global_(self):
return self._wrapped.contents.global_
@global_.setter
def global_(self, value):
self._wrapped.contents.global_ = value
@property
def quality(self):
return self._wrapped.contents.quality
@quality.setter
def quality(self, value):
self._wrapped.contents.quality = value
@property
def headlight(self):
return self._wrapped.contents.headlight
@headlight.setter
def headlight(self, value):
self._wrapped.contents.headlight = value
@property
def map_(self):
return self._wrapped.contents.map_
@map_.setter
def map_(self, value):
self._wrapped.contents.map_ = value
@property
def scale(self):
return self._wrapped.contents.scale
@scale.setter
def scale(self, value):
self._wrapped.contents.scale = value
@property
def rgba(self):
return self._wrapped.contents.rgba
@rgba.setter
def rgba(self, value):
self._wrapped.contents.rgba = value
class MjStatisticWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def meanmass(self):
return self._wrapped.contents.meanmass
@meanmass.setter
def meanmass(self, value):
self._wrapped.contents.meanmass = value
@property
def meansize(self):
return self._wrapped.contents.meansize
@meansize.setter
def meansize(self, value):
self._wrapped.contents.meansize = value
@property
def extent(self):
return self._wrapped.contents.extent
@extent.setter
def extent(self, value):
self._wrapped.contents.extent = value
@property
def center(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.center, dtype=np.double, count=(3)), (3, ))
arr.setflags(write=False)
return arr
@center.setter
def center(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.center, val_ptr, 3 * sizeof(c_double))
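# Illustrative sketch (hypothetical helper): MjStatisticWrapper works the same way,
# e.g. center is a c_double * 3 field exposed as a (3,) numpy view.
def _example_statistic_wrapper():
    stat = MjStatisticWrapper(pointer(MJSTATISTIC()))
    stat.extent = 1.5
    stat.center = [0.0, 0.0, 0.25]
    return stat.extent, stat.center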
class MjDataWrapper(object):
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def nstack(self):
return self._wrapped.contents.nstack
@nstack.setter
def nstack(self, value):
self._wrapped.contents.nstack = value
@property
def nbuffer(self):
return self._wrapped.contents.nbuffer
@nbuffer.setter
def nbuffer(self, value):
self._wrapped.contents.nbuffer = value
@property
def pstack(self):
return self._wrapped.contents.pstack
@pstack.setter
def pstack(self, value):
self._wrapped.contents.pstack = value
@property
def maxuse_stack(self):
return self._wrapped.contents.maxuse_stack
@maxuse_stack.setter
def maxuse_stack(self, value):
self._wrapped.contents.maxuse_stack = value
@property
def maxuse_con(self):
return self._wrapped.contents.maxuse_con
@maxuse_con.setter
def maxuse_con(self, value):
self._wrapped.contents.maxuse_con = value
@property
def maxuse_efc(self):
return self._wrapped.contents.maxuse_efc
@maxuse_efc.setter
def maxuse_efc(self, value):
self._wrapped.contents.maxuse_efc = value
@property
def nwarning(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.nwarning, dtype=np.int, count=(8)), (8, ))
arr.setflags(write=False)
return arr
@nwarning.setter
def nwarning(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.nwarning, val_ptr, 8 * sizeof(c_int))
@property
def warning_info(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.warning_info, dtype=np.int, count=(8)), (8, ))
arr.setflags(write=False)
return arr
@warning_info.setter
def warning_info(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.warning_info, val_ptr, 8 * sizeof(c_int))
@property
def timer_ncall(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.timer_ncall, dtype=np.int, count=(13)), (13, ))
arr.setflags(write=False)
return arr
@timer_ncall.setter
def timer_ncall(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.timer_ncall, val_ptr, 13 * sizeof(c_int))
@property
def timer_duration(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.timer_duration, dtype=np.double, count=(13)), (13, ))
arr.setflags(write=False)
return arr
@timer_duration.setter
def timer_duration(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.timer_duration, val_ptr, 13 * sizeof(c_double))
@property
def solver_iter(self):
return self._wrapped.contents.solver_iter
@solver_iter.setter
def solver_iter(self, value):
self._wrapped.contents.solver_iter = value
@property
def solver_trace(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.solver_trace, dtype=np.double, count=(200)), (200, ))
arr.setflags(write=False)
return arr
@solver_trace.setter
def solver_trace(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.solver_trace, val_ptr, 200 * sizeof(c_double))
@property
def solver_fwdinv(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.solver_fwdinv, dtype=np.double, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@solver_fwdinv.setter
def solver_fwdinv(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.solver_fwdinv, val_ptr, 2 * sizeof(c_double))
@property
def ne(self):
return self._wrapped.contents.ne
@ne.setter
def ne(self, value):
self._wrapped.contents.ne = value
@property
def nf(self):
return self._wrapped.contents.nf
@nf.setter
def nf(self, value):
self._wrapped.contents.nf = value
@property
def nefc(self):
return self._wrapped.contents.nefc
@nefc.setter
def nefc(self, value):
self._wrapped.contents.nefc = value
@property
def ncon(self):
return self._wrapped.contents.ncon
@ncon.setter
def ncon(self, value):
self._wrapped.contents.ncon = value
@property
def time(self):
return self._wrapped.contents.time
@time.setter
def time(self, value):
self._wrapped.contents.time = value
@property
def energy(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.energy, dtype=np.double, count=(2)), (2, ))
arr.setflags(write=False)
return arr
@energy.setter
def energy(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.energy, val_ptr, 2 * sizeof(c_double))
@property
def buffer(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.buffer, dtype=np.uint8, count=(self.nbuffer)), (self.nbuffer, ))
arr.setflags(write=False)
return arr
@buffer.setter
def buffer(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.buffer, val_ptr, self.nbuffer * sizeof(c_ubyte))
@property
def stack(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.stack, dtype=np.double, count=(self.nstack)), (self.nstack, ))
arr.setflags(write=False)
return arr
@stack.setter
def stack(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.stack, val_ptr, self.nstack * sizeof(c_double))
@property
def qpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qpos, dtype=np.double, count=(self._size_src.nq*1)), (self._size_src.nq, 1, ))
arr.setflags(write=False)
return arr
@qpos.setter
def qpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qpos, val_ptr, self._size_src.nq*1 * sizeof(c_double))
@property
def qvel(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qvel, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qvel.setter
def qvel(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qvel, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def act(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.act, dtype=np.double, count=(self._size_src.na*1)), (self._size_src.na, 1, ))
arr.setflags(write=False)
return arr
@act.setter
def act(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.act, val_ptr, self._size_src.na*1 * sizeof(c_double))
@property
def ctrl(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ctrl, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, ))
arr.setflags(write=False)
return arr
@ctrl.setter
def ctrl(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.ctrl, val_ptr, self._size_src.nu*1 * sizeof(c_double))
@property
def qfrc_applied(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_applied, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_applied.setter
def qfrc_applied(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_applied, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def xfrc_applied(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xfrc_applied, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, ))
arr.setflags(write=False)
return arr
@xfrc_applied.setter
def xfrc_applied(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xfrc_applied, val_ptr, self._size_src.nbody*6 * sizeof(c_double))
@property
def qacc(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qacc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qacc.setter
def qacc(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qacc, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def act_dot(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.act_dot, dtype=np.double, count=(self._size_src.na*1)), (self._size_src.na, 1, ))
arr.setflags(write=False)
return arr
@act_dot.setter
def act_dot(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.act_dot, val_ptr, self._size_src.na*1 * sizeof(c_double))
@property
def mocap_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mocap_pos, dtype=np.double, count=(self._size_src.nmocap*3)), (self._size_src.nmocap, 3, ))
arr.setflags(write=False)
return arr
@mocap_pos.setter
def mocap_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.mocap_pos, val_ptr, self._size_src.nmocap*3 * sizeof(c_double))
@property
def mocap_quat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mocap_quat, dtype=np.double, count=(self._size_src.nmocap*4)), (self._size_src.nmocap, 4, ))
arr.setflags(write=False)
return arr
@mocap_quat.setter
def mocap_quat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.mocap_quat, val_ptr, self._size_src.nmocap*4 * sizeof(c_double))
@property
def userdata(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.userdata, dtype=np.double, count=(self._size_src.nuserdata*1)), (self._size_src.nuserdata, 1, ))
arr.setflags(write=False)
return arr
@userdata.setter
def userdata(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.userdata, val_ptr, self._size_src.nuserdata*1 * sizeof(c_double))
@property
def sensordata(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensordata, dtype=np.double, count=(self._size_src.nsensordata*1)), (self._size_src.nsensordata, 1, ))
arr.setflags(write=False)
return arr
@sensordata.setter
def sensordata(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.sensordata, val_ptr, self._size_src.nsensordata*1 * sizeof(c_double))
@property
def xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xpos, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, ))
arr.setflags(write=False)
return arr
@xpos.setter
def xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xpos, val_ptr, self._size_src.nbody*3 * sizeof(c_double))
@property
def xquat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xquat, dtype=np.double, count=(self._size_src.nbody*4)), (self._size_src.nbody, 4, ))
arr.setflags(write=False)
return arr
@xquat.setter
def xquat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xquat, val_ptr, self._size_src.nbody*4 * sizeof(c_double))
@property
def xmat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xmat, dtype=np.double, count=(self._size_src.nbody*9)), (self._size_src.nbody, 9, ))
arr.setflags(write=False)
return arr
@xmat.setter
def xmat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xmat, val_ptr, self._size_src.nbody*9 * sizeof(c_double))
@property
def xipos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xipos, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, ))
arr.setflags(write=False)
return arr
@xipos.setter
def xipos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xipos, val_ptr, self._size_src.nbody*3 * sizeof(c_double))
@property
def ximat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ximat, dtype=np.double, count=(self._size_src.nbody*9)), (self._size_src.nbody, 9, ))
arr.setflags(write=False)
return arr
@ximat.setter
def ximat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.ximat, val_ptr, self._size_src.nbody*9 * sizeof(c_double))
@property
def xanchor(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xanchor, dtype=np.double, count=(self._size_src.njnt*3)), (self._size_src.njnt, 3, ))
arr.setflags(write=False)
return arr
@xanchor.setter
def xanchor(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xanchor, val_ptr, self._size_src.njnt*3 * sizeof(c_double))
@property
def xaxis(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.xaxis, dtype=np.double, count=(self._size_src.njnt*3)), (self._size_src.njnt, 3, ))
arr.setflags(write=False)
return arr
@xaxis.setter
def xaxis(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.xaxis, val_ptr, self._size_src.njnt*3 * sizeof(c_double))
@property
def geom_xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_xpos, dtype=np.double, count=(self._size_src.ngeom*3)), (self._size_src.ngeom, 3, ))
arr.setflags(write=False)
return arr
@geom_xpos.setter
def geom_xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_xpos, val_ptr, self._size_src.ngeom*3 * sizeof(c_double))
@property
def geom_xmat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_xmat, dtype=np.double, count=(self._size_src.ngeom*9)), (self._size_src.ngeom, 9, ))
arr.setflags(write=False)
return arr
@geom_xmat.setter
def geom_xmat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_xmat, val_ptr, self._size_src.ngeom*9 * sizeof(c_double))
@property
def site_xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_xpos, dtype=np.double, count=(self._size_src.nsite*3)), (self._size_src.nsite, 3, ))
arr.setflags(write=False)
return arr
@site_xpos.setter
def site_xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_xpos, val_ptr, self._size_src.nsite*3 * sizeof(c_double))
@property
def site_xmat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_xmat, dtype=np.double, count=(self._size_src.nsite*9)), (self._size_src.nsite, 9, ))
arr.setflags(write=False)
return arr
@site_xmat.setter
def site_xmat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_xmat, val_ptr, self._size_src.nsite*9 * sizeof(c_double))
@property
def cam_xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_xpos, dtype=np.double, count=(self._size_src.ncam*3)), (self._size_src.ncam, 3, ))
arr.setflags(write=False)
return arr
@cam_xpos.setter
def cam_xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_xpos, val_ptr, self._size_src.ncam*3 * sizeof(c_double))
@property
def cam_xmat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_xmat, dtype=np.double, count=(self._size_src.ncam*9)), (self._size_src.ncam, 9, ))
arr.setflags(write=False)
return arr
@cam_xmat.setter
def cam_xmat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_xmat, val_ptr, self._size_src.ncam*9 * sizeof(c_double))
@property
def light_xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_xpos, dtype=np.double, count=(self._size_src.nlight*3)), (self._size_src.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_xpos.setter
def light_xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_xpos, val_ptr, self._size_src.nlight*3 * sizeof(c_double))
@property
def light_xdir(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_xdir, dtype=np.double, count=(self._size_src.nlight*3)), (self._size_src.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_xdir.setter
def light_xdir(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_xdir, val_ptr, self._size_src.nlight*3 * sizeof(c_double))
@property
def subtree_com(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_com, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, ))
arr.setflags(write=False)
return arr
@subtree_com.setter
def subtree_com(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.subtree_com, val_ptr, self._size_src.nbody*3 * sizeof(c_double))
@property
def cdof(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cdof, dtype=np.double, count=(self._size_src.nv*6)), (self._size_src.nv, 6, ))
arr.setflags(write=False)
return arr
@cdof.setter
def cdof(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cdof, val_ptr, self._size_src.nv*6 * sizeof(c_double))
@property
def cinert(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cinert, dtype=np.double, count=(self._size_src.nbody*10)), (self._size_src.nbody, 10, ))
arr.setflags(write=False)
return arr
@cinert.setter
def cinert(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cinert, val_ptr, self._size_src.nbody*10 * sizeof(c_double))
@property
def ten_wrapadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ten_wrapadr, dtype=np.int, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, ))
arr.setflags(write=False)
return arr
@ten_wrapadr.setter
def ten_wrapadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.ten_wrapadr, val_ptr, self._size_src.ntendon*1 * sizeof(c_int))
@property
def ten_wrapnum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ten_wrapnum, dtype=np.int, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, ))
arr.setflags(write=False)
return arr
@ten_wrapnum.setter
def ten_wrapnum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.ten_wrapnum, val_ptr, self._size_src.ntendon*1 * sizeof(c_int))
@property
def ten_length(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ten_length, dtype=np.double, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, ))
arr.setflags(write=False)
return arr
@ten_length.setter
def ten_length(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.ten_length, val_ptr, self._size_src.ntendon*1 * sizeof(c_double))
@property
def ten_moment(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ten_moment, dtype=np.double, count=(self._size_src.ntendon*self._size_src.nv)), (self._size_src.ntendon, self._size_src.nv, ))
arr.setflags(write=False)
return arr
@ten_moment.setter
def ten_moment(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.ten_moment, val_ptr, self._size_src.ntendon*self._size_src.nv * sizeof(c_double))
@property
def wrap_obj(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_obj, dtype=np.int, count=(self._size_src.nwrap*2)), (self._size_src.nwrap, 2, ))
arr.setflags(write=False)
return arr
@wrap_obj.setter
def wrap_obj(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.wrap_obj, val_ptr, self._size_src.nwrap*2 * sizeof(c_int))
@property
def wrap_xpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_xpos, dtype=np.double, count=(self._size_src.nwrap*6)), (self._size_src.nwrap, 6, ))
arr.setflags(write=False)
return arr
@wrap_xpos.setter
def wrap_xpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.wrap_xpos, val_ptr, self._size_src.nwrap*6 * sizeof(c_double))
@property
def actuator_length(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_length, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_length.setter
def actuator_length(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_length, val_ptr, self._size_src.nu*1 * sizeof(c_double))
@property
def actuator_moment(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_moment, dtype=np.double, count=(self._size_src.nu*self._size_src.nv)), (self._size_src.nu, self._size_src.nv, ))
arr.setflags(write=False)
return arr
@actuator_moment.setter
def actuator_moment(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_moment, val_ptr, self._size_src.nu*self._size_src.nv * sizeof(c_double))
@property
def crb(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.crb, dtype=np.double, count=(self._size_src.nbody*10)), (self._size_src.nbody, 10, ))
arr.setflags(write=False)
return arr
@crb.setter
def crb(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.crb, val_ptr, self._size_src.nbody*10 * sizeof(c_double))
@property
def qM(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qM, dtype=np.double, count=(self._size_src.nM*1)), (self._size_src.nM, 1, ))
arr.setflags(write=False)
return arr
@qM.setter
def qM(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qM, val_ptr, self._size_src.nM*1 * sizeof(c_double))
@property
def qLD(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qLD, dtype=np.double, count=(self._size_src.nM*1)), (self._size_src.nM, 1, ))
arr.setflags(write=False)
return arr
@qLD.setter
def qLD(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qLD, val_ptr, self._size_src.nM*1 * sizeof(c_double))
@property
def qLDiagInv(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qLDiagInv, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qLDiagInv.setter
def qLDiagInv(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qLDiagInv, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def qLDiagSqrtInv(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qLDiagSqrtInv, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qLDiagSqrtInv.setter
def qLDiagSqrtInv(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qLDiagSqrtInv, val_ptr, self._size_src.nv*1 * sizeof(c_double))
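    # Constraint (efc_*) arrays below are sized by the njmax and nv limits
    # taken from the size source.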
@property
def efc_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_type, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_type.setter
def efc_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_type, val_ptr, self._size_src.njmax*1 * sizeof(c_int))
@property
def efc_id(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_id, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_id.setter
def efc_id(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_id, val_ptr, self._size_src.njmax*1 * sizeof(c_int))
@property
def efc_rownnz(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rownnz, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_rownnz.setter
def efc_rownnz(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_rownnz, val_ptr, self._size_src.njmax*1 * sizeof(c_int))
@property
def efc_rowadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rowadr, dtype=np.int, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_rowadr.setter
def efc_rowadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_rowadr, val_ptr, self._size_src.njmax*1 * sizeof(c_int))
@property
def efc_colind(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_colind, dtype=np.int, count=(self._size_src.njmax*self._size_src.nv)), (self._size_src.njmax, self._size_src.nv, ))
arr.setflags(write=False)
return arr
@efc_colind.setter
def efc_colind(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_colind, val_ptr, self._size_src.njmax*self._size_src.nv * sizeof(c_int))
@property
def efc_rownnz_T(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rownnz_T, dtype=np.int, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@efc_rownnz_T.setter
def efc_rownnz_T(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_rownnz_T, val_ptr, self._size_src.nv*1 * sizeof(c_int))
@property
def efc_rowadr_T(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_rowadr_T, dtype=np.int, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@efc_rowadr_T.setter
def efc_rowadr_T(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_rowadr_T, val_ptr, self._size_src.nv*1 * sizeof(c_int))
@property
def efc_colind_T(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_colind_T, dtype=np.int, count=(self._size_src.nv*self._size_src.njmax)), (self._size_src.nv, self._size_src.njmax, ))
arr.setflags(write=False)
return arr
@efc_colind_T.setter
def efc_colind_T(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.efc_colind_T, val_ptr, self._size_src.nv*self._size_src.njmax * sizeof(c_int))
@property
def efc_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_solref, dtype=np.double, count=(self._size_src.njmax*2)), (self._size_src.njmax, 2, ))
arr.setflags(write=False)
return arr
@efc_solref.setter
def efc_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_solref, val_ptr, self._size_src.njmax*2 * sizeof(c_double))
@property
def efc_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_solimp, dtype=np.double, count=(self._size_src.njmax*3)), (self._size_src.njmax, 3, ))
arr.setflags(write=False)
return arr
@efc_solimp.setter
def efc_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_solimp, val_ptr, self._size_src.njmax*3 * sizeof(c_double))
@property
def efc_margin(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_margin, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_margin.setter
def efc_margin(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_margin, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_frictionloss(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_frictionloss, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_frictionloss.setter
def efc_frictionloss(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_frictionloss, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_pos, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_pos.setter
def efc_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_pos, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_J(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_J, dtype=np.double, count=(self._size_src.njmax*self._size_src.nv)), (self._size_src.njmax, self._size_src.nv, ))
arr.setflags(write=False)
return arr
@efc_J.setter
def efc_J(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_J, val_ptr, self._size_src.njmax*self._size_src.nv * sizeof(c_double))
@property
def efc_J_T(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_J_T, dtype=np.double, count=(self._size_src.nv*self._size_src.njmax)), (self._size_src.nv, self._size_src.njmax, ))
arr.setflags(write=False)
return arr
@efc_J_T.setter
def efc_J_T(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_J_T, val_ptr, self._size_src.nv*self._size_src.njmax * sizeof(c_double))
@property
def efc_diagApprox(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_diagApprox, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_diagApprox.setter
def efc_diagApprox(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_diagApprox, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_D(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_D, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_D.setter
def efc_D(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_D, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_R(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_R, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_R.setter
def efc_R(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_R, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_AR(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_AR, dtype=np.double, count=(self._size_src.njmax*self._size_src.njmax)), (self._size_src.njmax, self._size_src.njmax, ))
arr.setflags(write=False)
return arr
@efc_AR.setter
def efc_AR(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_AR, val_ptr, self._size_src.njmax*self._size_src.njmax * sizeof(c_double))
@property
def e_ARchol(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.e_ARchol, dtype=np.double, count=(self._size_src.nemax*self._size_src.nemax)), (self._size_src.nemax, self._size_src.nemax, ))
arr.setflags(write=False)
return arr
@e_ARchol.setter
def e_ARchol(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.e_ARchol, val_ptr, self._size_src.nemax*self._size_src.nemax * sizeof(c_double))
@property
def fc_e_rect(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.fc_e_rect, dtype=np.double, count=(self._size_src.njmax*self._size_src.nemax)), (self._size_src.njmax, self._size_src.nemax, ))
arr.setflags(write=False)
return arr
@fc_e_rect.setter
def fc_e_rect(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.fc_e_rect, val_ptr, self._size_src.njmax*self._size_src.nemax * sizeof(c_double))
@property
def fc_AR(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.fc_AR, dtype=np.double, count=(self._size_src.njmax*self._size_src.njmax)), (self._size_src.njmax, self._size_src.njmax, ))
arr.setflags(write=False)
return arr
@fc_AR.setter
def fc_AR(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.fc_AR, val_ptr, self._size_src.njmax*self._size_src.njmax * sizeof(c_double))
@property
def ten_velocity(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.ten_velocity, dtype=np.double, count=(self._size_src.ntendon*1)), (self._size_src.ntendon, 1, ))
arr.setflags(write=False)
return arr
@ten_velocity.setter
def ten_velocity(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.ten_velocity, val_ptr, self._size_src.ntendon*1 * sizeof(c_double))
@property
def actuator_velocity(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_velocity, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_velocity.setter
def actuator_velocity(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_velocity, val_ptr, self._size_src.nu*1 * sizeof(c_double))
@property
def cvel(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cvel, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, ))
arr.setflags(write=False)
return arr
@cvel.setter
def cvel(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cvel, val_ptr, self._size_src.nbody*6 * sizeof(c_double))
@property
def cdof_dot(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cdof_dot, dtype=np.double, count=(self._size_src.nv*6)), (self._size_src.nv, 6, ))
arr.setflags(write=False)
return arr
@cdof_dot.setter
def cdof_dot(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cdof_dot, val_ptr, self._size_src.nv*6 * sizeof(c_double))
@property
def qfrc_bias(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_bias, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_bias.setter
def qfrc_bias(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_bias, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def qfrc_passive(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_passive, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_passive.setter
def qfrc_passive(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_passive, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def efc_vel(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_vel, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_vel.setter
def efc_vel(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_vel, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_aref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_aref, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_aref.setter
def efc_aref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_aref, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def subtree_linvel(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_linvel, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, ))
arr.setflags(write=False)
return arr
@subtree_linvel.setter
def subtree_linvel(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.subtree_linvel, val_ptr, self._size_src.nbody*3 * sizeof(c_double))
@property
def subtree_angmom(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.subtree_angmom, dtype=np.double, count=(self._size_src.nbody*3)), (self._size_src.nbody, 3, ))
arr.setflags(write=False)
return arr
@subtree_angmom.setter
def subtree_angmom(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.subtree_angmom, val_ptr, self._size_src.nbody*3 * sizeof(c_double))
@property
def actuator_force(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_force, dtype=np.double, count=(self._size_src.nu*1)), (self._size_src.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_force.setter
def actuator_force(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_force, val_ptr, self._size_src.nu*1 * sizeof(c_double))
@property
def qfrc_actuator(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_actuator, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_actuator.setter
def qfrc_actuator(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_actuator, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def qfrc_unc(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_unc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_unc.setter
def qfrc_unc(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_unc, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def qacc_unc(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qacc_unc, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qacc_unc.setter
def qacc_unc(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qacc_unc, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def efc_b(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_b, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_b.setter
def efc_b(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_b, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def fc_b(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.fc_b, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@fc_b.setter
def fc_b(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.fc_b, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def efc_force(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.efc_force, dtype=np.double, count=(self._size_src.njmax*1)), (self._size_src.njmax, 1, ))
arr.setflags(write=False)
return arr
@efc_force.setter
def efc_force(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.efc_force, val_ptr, self._size_src.njmax*1 * sizeof(c_double))
@property
def qfrc_constraint(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_constraint, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_constraint.setter
def qfrc_constraint(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_constraint, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def qfrc_inverse(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qfrc_inverse, dtype=np.double, count=(self._size_src.nv*1)), (self._size_src.nv, 1, ))
arr.setflags(write=False)
return arr
@qfrc_inverse.setter
def qfrc_inverse(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qfrc_inverse, val_ptr, self._size_src.nv*1 * sizeof(c_double))
@property
def cacc(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cacc, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, ))
arr.setflags(write=False)
return arr
@cacc.setter
def cacc(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cacc, val_ptr, self._size_src.nbody*6 * sizeof(c_double))
@property
def cfrc_int(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cfrc_int, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, ))
arr.setflags(write=False)
return arr
@cfrc_int.setter
def cfrc_int(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cfrc_int, val_ptr, self._size_src.nbody*6 * sizeof(c_double))
@property
def cfrc_ext(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cfrc_ext, dtype=np.double, count=(self._size_src.nbody*6)), (self._size_src.nbody, 6, ))
arr.setflags(write=False)
return arr
@cfrc_ext.setter
def cfrc_ext(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cfrc_ext, val_ptr, self._size_src.nbody*6 * sizeof(c_double))
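# Illustrative usage sketch (the pointer names below are hypothetical and come
# from whatever loads the model and allocates the data through the MuJoCo C API):
#     model = MjModelWrapper(model_ptr)
#     data = MjDataWrapper(data_ptr, size_src=model)
#     q = data.qpos            # read-only (nq, 1) numpy copy
#     data.qpos = q + 0.01     # written back into the mjData buffer via memmove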
class MjModelWrapper(object):
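    """Wrapper around a ctypes pointer to an mjModel struct.

    Size fields (nq, nv, nbody, njnt, ...) are plain int properties and are
    used directly to shape the model's own array properties such as qpos0,
    body_pos and jnt_axis.
    """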
def __init__(self, wrapped, size_src=None):
self._wrapped = wrapped
self._size_src = size_src
@property
def ptr(self):
return self._wrapped
@property
def obj(self):
return self._wrapped.contents
@property
def nq(self):
return self._wrapped.contents.nq
@nq.setter
def nq(self, value):
self._wrapped.contents.nq = value
@property
def nv(self):
return self._wrapped.contents.nv
@nv.setter
def nv(self, value):
self._wrapped.contents.nv = value
@property
def nu(self):
return self._wrapped.contents.nu
@nu.setter
def nu(self, value):
self._wrapped.contents.nu = value
@property
def na(self):
return self._wrapped.contents.na
@na.setter
def na(self, value):
self._wrapped.contents.na = value
@property
def nbody(self):
return self._wrapped.contents.nbody
@nbody.setter
def nbody(self, value):
self._wrapped.contents.nbody = value
@property
def njnt(self):
return self._wrapped.contents.njnt
@njnt.setter
def njnt(self, value):
self._wrapped.contents.njnt = value
@property
def ngeom(self):
return self._wrapped.contents.ngeom
@ngeom.setter
def ngeom(self, value):
self._wrapped.contents.ngeom = value
@property
def nsite(self):
return self._wrapped.contents.nsite
@nsite.setter
def nsite(self, value):
self._wrapped.contents.nsite = value
@property
def ncam(self):
return self._wrapped.contents.ncam
@ncam.setter
def ncam(self, value):
self._wrapped.contents.ncam = value
@property
def nlight(self):
return self._wrapped.contents.nlight
@nlight.setter
def nlight(self, value):
self._wrapped.contents.nlight = value
@property
def nmesh(self):
return self._wrapped.contents.nmesh
@nmesh.setter
def nmesh(self, value):
self._wrapped.contents.nmesh = value
@property
def nmeshvert(self):
return self._wrapped.contents.nmeshvert
@nmeshvert.setter
def nmeshvert(self, value):
self._wrapped.contents.nmeshvert = value
@property
def nmeshface(self):
return self._wrapped.contents.nmeshface
@nmeshface.setter
def nmeshface(self, value):
self._wrapped.contents.nmeshface = value
@property
def nmeshgraph(self):
return self._wrapped.contents.nmeshgraph
@nmeshgraph.setter
def nmeshgraph(self, value):
self._wrapped.contents.nmeshgraph = value
@property
def nhfield(self):
return self._wrapped.contents.nhfield
@nhfield.setter
def nhfield(self, value):
self._wrapped.contents.nhfield = value
@property
def nhfielddata(self):
return self._wrapped.contents.nhfielddata
@nhfielddata.setter
def nhfielddata(self, value):
self._wrapped.contents.nhfielddata = value
@property
def ntex(self):
return self._wrapped.contents.ntex
@ntex.setter
def ntex(self, value):
self._wrapped.contents.ntex = value
@property
def ntexdata(self):
return self._wrapped.contents.ntexdata
@ntexdata.setter
def ntexdata(self, value):
self._wrapped.contents.ntexdata = value
@property
def nmat(self):
return self._wrapped.contents.nmat
@nmat.setter
def nmat(self, value):
self._wrapped.contents.nmat = value
@property
def npair(self):
return self._wrapped.contents.npair
@npair.setter
def npair(self, value):
self._wrapped.contents.npair = value
@property
def nexclude(self):
return self._wrapped.contents.nexclude
@nexclude.setter
def nexclude(self, value):
self._wrapped.contents.nexclude = value
@property
def neq(self):
return self._wrapped.contents.neq
@neq.setter
def neq(self, value):
self._wrapped.contents.neq = value
@property
def ntendon(self):
return self._wrapped.contents.ntendon
@ntendon.setter
def ntendon(self, value):
self._wrapped.contents.ntendon = value
@property
def nwrap(self):
return self._wrapped.contents.nwrap
@nwrap.setter
def nwrap(self, value):
self._wrapped.contents.nwrap = value
@property
def nsensor(self):
return self._wrapped.contents.nsensor
@nsensor.setter
def nsensor(self, value):
self._wrapped.contents.nsensor = value
@property
def nnumeric(self):
return self._wrapped.contents.nnumeric
@nnumeric.setter
def nnumeric(self, value):
self._wrapped.contents.nnumeric = value
@property
def nnumericdata(self):
return self._wrapped.contents.nnumericdata
@nnumericdata.setter
def nnumericdata(self, value):
self._wrapped.contents.nnumericdata = value
@property
def ntext(self):
return self._wrapped.contents.ntext
@ntext.setter
def ntext(self, value):
self._wrapped.contents.ntext = value
@property
def ntextdata(self):
return self._wrapped.contents.ntextdata
@ntextdata.setter
def ntextdata(self, value):
self._wrapped.contents.ntextdata = value
@property
def ntuple(self):
return self._wrapped.contents.ntuple
@ntuple.setter
def ntuple(self, value):
self._wrapped.contents.ntuple = value
@property
def ntupledata(self):
return self._wrapped.contents.ntupledata
@ntupledata.setter
def ntupledata(self, value):
self._wrapped.contents.ntupledata = value
@property
def nkey(self):
return self._wrapped.contents.nkey
@nkey.setter
def nkey(self, value):
self._wrapped.contents.nkey = value
@property
def nuser_body(self):
return self._wrapped.contents.nuser_body
@nuser_body.setter
def nuser_body(self, value):
self._wrapped.contents.nuser_body = value
@property
def nuser_jnt(self):
return self._wrapped.contents.nuser_jnt
@nuser_jnt.setter
def nuser_jnt(self, value):
self._wrapped.contents.nuser_jnt = value
@property
def nuser_geom(self):
return self._wrapped.contents.nuser_geom
@nuser_geom.setter
def nuser_geom(self, value):
self._wrapped.contents.nuser_geom = value
@property
def nuser_site(self):
return self._wrapped.contents.nuser_site
@nuser_site.setter
def nuser_site(self, value):
self._wrapped.contents.nuser_site = value
@property
def nuser_tendon(self):
return self._wrapped.contents.nuser_tendon
@nuser_tendon.setter
def nuser_tendon(self, value):
self._wrapped.contents.nuser_tendon = value
@property
def nuser_actuator(self):
return self._wrapped.contents.nuser_actuator
@nuser_actuator.setter
def nuser_actuator(self, value):
self._wrapped.contents.nuser_actuator = value
@property
def nuser_sensor(self):
return self._wrapped.contents.nuser_sensor
@nuser_sensor.setter
def nuser_sensor(self, value):
self._wrapped.contents.nuser_sensor = value
@property
def nnames(self):
return self._wrapped.contents.nnames
@nnames.setter
def nnames(self, value):
self._wrapped.contents.nnames = value
@property
def nM(self):
return self._wrapped.contents.nM
@nM.setter
def nM(self, value):
self._wrapped.contents.nM = value
@property
def nemax(self):
return self._wrapped.contents.nemax
@nemax.setter
def nemax(self, value):
self._wrapped.contents.nemax = value
@property
def njmax(self):
return self._wrapped.contents.njmax
@njmax.setter
def njmax(self, value):
self._wrapped.contents.njmax = value
@property
def nconmax(self):
return self._wrapped.contents.nconmax
@nconmax.setter
def nconmax(self, value):
self._wrapped.contents.nconmax = value
@property
def nstack(self):
return self._wrapped.contents.nstack
@nstack.setter
def nstack(self, value):
self._wrapped.contents.nstack = value
@property
def nuserdata(self):
return self._wrapped.contents.nuserdata
@nuserdata.setter
def nuserdata(self, value):
self._wrapped.contents.nuserdata = value
@property
def nmocap(self):
return self._wrapped.contents.nmocap
@nmocap.setter
def nmocap(self, value):
self._wrapped.contents.nmocap = value
@property
def nsensordata(self):
return self._wrapped.contents.nsensordata
@nsensordata.setter
def nsensordata(self, value):
self._wrapped.contents.nsensordata = value
@property
def nbuffer(self):
return self._wrapped.contents.nbuffer
@nbuffer.setter
def nbuffer(self, value):
self._wrapped.contents.nbuffer = value
@property
def opt(self):
return self._wrapped.contents.opt
@opt.setter
def opt(self, value):
self._wrapped.contents.opt = value
@property
def vis(self):
return self._wrapped.contents.vis
@vis.setter
def vis(self, value):
self._wrapped.contents.vis = value
@property
def stat(self):
return self._wrapped.contents.stat
@stat.setter
def stat(self, value):
self._wrapped.contents.stat = value
@property
def buffer(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.buffer, dtype=np.uint8, count=(self.nbuffer)), (self.nbuffer, ))
arr.setflags(write=False)
return arr
@buffer.setter
def buffer(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.buffer, val_ptr, self.nbuffer * sizeof(c_ubyte))
@property
def qpos0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qpos0, dtype=np.double, count=(self.nq*1)), (self.nq, 1, ))
arr.setflags(write=False)
return arr
@qpos0.setter
def qpos0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qpos0, val_ptr, self.nq*1 * sizeof(c_double))
@property
def qpos_spring(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.qpos_spring, dtype=np.double, count=(self.nq*1)), (self.nq, 1, ))
arr.setflags(write=False)
return arr
@qpos_spring.setter
def qpos_spring(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.qpos_spring, val_ptr, self.nq*1 * sizeof(c_double))
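    # Per-body model fields; the leading dimension of each array is nbody.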
@property
def body_parentid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_parentid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_parentid.setter
def body_parentid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_parentid, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_rootid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_rootid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_rootid.setter
def body_rootid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_rootid, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_weldid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_weldid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_weldid.setter
def body_weldid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_weldid, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_mocapid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_mocapid, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_mocapid.setter
def body_mocapid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_mocapid, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_jntnum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_jntnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_jntnum.setter
def body_jntnum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_jntnum, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_jntadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_jntadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_jntadr.setter
def body_jntadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_jntadr, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_dofnum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_dofnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_dofnum.setter
def body_dofnum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_dofnum, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_dofadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_dofadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_dofadr.setter
def body_dofadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_dofadr, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_geomnum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_geomnum, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_geomnum.setter
def body_geomnum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_geomnum, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_geomadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_geomadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_geomadr.setter
def body_geomadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.body_geomadr, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def body_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_pos, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, ))
arr.setflags(write=False)
return arr
@body_pos.setter
def body_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_pos, val_ptr, self.nbody*3 * sizeof(c_double))
@property
def body_quat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_quat, dtype=np.double, count=(self.nbody*4)), (self.nbody, 4, ))
arr.setflags(write=False)
return arr
@body_quat.setter
def body_quat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_quat, val_ptr, self.nbody*4 * sizeof(c_double))
@property
def body_ipos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_ipos, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, ))
arr.setflags(write=False)
return arr
@body_ipos.setter
def body_ipos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_ipos, val_ptr, self.nbody*3 * sizeof(c_double))
@property
def body_iquat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_iquat, dtype=np.double, count=(self.nbody*4)), (self.nbody, 4, ))
arr.setflags(write=False)
return arr
@body_iquat.setter
def body_iquat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_iquat, val_ptr, self.nbody*4 * sizeof(c_double))
@property
def body_mass(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_mass, dtype=np.double, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_mass.setter
def body_mass(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_mass, val_ptr, self.nbody*1 * sizeof(c_double))
@property
def body_subtreemass(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_subtreemass, dtype=np.double, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@body_subtreemass.setter
def body_subtreemass(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_subtreemass, val_ptr, self.nbody*1 * sizeof(c_double))
@property
def body_inertia(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_inertia, dtype=np.double, count=(self.nbody*3)), (self.nbody, 3, ))
arr.setflags(write=False)
return arr
@body_inertia.setter
def body_inertia(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_inertia, val_ptr, self.nbody*3 * sizeof(c_double))
@property
def body_invweight0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_invweight0, dtype=np.double, count=(self.nbody*2)), (self.nbody, 2, ))
arr.setflags(write=False)
return arr
@body_invweight0.setter
def body_invweight0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_invweight0, val_ptr, self.nbody*2 * sizeof(c_double))
@property
def body_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.body_user, dtype=np.double, count=(self.nbody*self.nuser_body)), (self.nbody, self.nuser_body, ))
arr.setflags(write=False)
return arr
@body_user.setter
def body_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.body_user, val_ptr, self.nbody*self.nuser_body * sizeof(c_double))
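    # Per-joint model fields; the leading dimension of each array is njnt.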
@property
def jnt_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_type, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_type.setter
def jnt_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.jnt_type, val_ptr, self.njnt*1 * sizeof(c_int))
@property
def jnt_qposadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_qposadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_qposadr.setter
def jnt_qposadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.jnt_qposadr, val_ptr, self.njnt*1 * sizeof(c_int))
@property
def jnt_dofadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_dofadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_dofadr.setter
def jnt_dofadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.jnt_dofadr, val_ptr, self.njnt*1 * sizeof(c_int))
@property
def jnt_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_bodyid, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_bodyid.setter
def jnt_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.jnt_bodyid, val_ptr, self.njnt*1 * sizeof(c_int))
@property
def jnt_limited(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_limited, dtype=np.uint8, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_limited.setter
def jnt_limited(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.jnt_limited, val_ptr, self.njnt*1 * sizeof(c_ubyte))
@property
def jnt_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_solref, dtype=np.double, count=(self.njnt*2)), (self.njnt, 2, ))
arr.setflags(write=False)
return arr
@jnt_solref.setter
def jnt_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_solref, val_ptr, self.njnt*2 * sizeof(c_double))
@property
def jnt_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_solimp, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, ))
arr.setflags(write=False)
return arr
@jnt_solimp.setter
def jnt_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_solimp, val_ptr, self.njnt*3 * sizeof(c_double))
@property
def jnt_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_pos, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, ))
arr.setflags(write=False)
return arr
@jnt_pos.setter
def jnt_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_pos, val_ptr, self.njnt*3 * sizeof(c_double))
@property
def jnt_axis(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_axis, dtype=np.double, count=(self.njnt*3)), (self.njnt, 3, ))
arr.setflags(write=False)
return arr
@jnt_axis.setter
def jnt_axis(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_axis, val_ptr, self.njnt*3 * sizeof(c_double))
@property
def jnt_stiffness(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_stiffness, dtype=np.double, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_stiffness.setter
def jnt_stiffness(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_stiffness, val_ptr, self.njnt*1 * sizeof(c_double))
@property
def jnt_range(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_range, dtype=np.double, count=(self.njnt*2)), (self.njnt, 2, ))
arr.setflags(write=False)
return arr
@jnt_range.setter
def jnt_range(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_range, val_ptr, self.njnt*2 * sizeof(c_double))
@property
def jnt_margin(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_margin, dtype=np.double, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@jnt_margin.setter
def jnt_margin(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_margin, val_ptr, self.njnt*1 * sizeof(c_double))
@property
def jnt_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.jnt_user, dtype=np.double, count=(self.njnt*self.nuser_jnt)), (self.njnt, self.nuser_jnt, ))
arr.setflags(write=False)
return arr
@jnt_user.setter
def jnt_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.jnt_user, val_ptr, self.njnt*self.nuser_jnt * sizeof(c_double))
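    # Degree-of-freedom (dof_*) fields, sized by self.nv (one row per DoF).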
@property
def dof_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_bodyid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_bodyid.setter
def dof_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.dof_bodyid, val_ptr, self.nv*1 * sizeof(c_int))
@property
def dof_jntid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_jntid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_jntid.setter
def dof_jntid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.dof_jntid, val_ptr, self.nv*1 * sizeof(c_int))
@property
def dof_parentid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_parentid, dtype=np.int, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_parentid.setter
def dof_parentid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.dof_parentid, val_ptr, self.nv*1 * sizeof(c_int))
@property
def dof_Madr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_Madr, dtype=np.int, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_Madr.setter
def dof_Madr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.dof_Madr, val_ptr, self.nv*1 * sizeof(c_int))
@property
def dof_frictional(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_frictional, dtype=np.uint8, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_frictional.setter
def dof_frictional(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.dof_frictional, val_ptr, self.nv*1 * sizeof(c_ubyte))
@property
def dof_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_solref, dtype=np.double, count=(self.nv*2)), (self.nv, 2, ))
arr.setflags(write=False)
return arr
@dof_solref.setter
def dof_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_solref, val_ptr, self.nv*2 * sizeof(c_double))
@property
def dof_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_solimp, dtype=np.double, count=(self.nv*3)), (self.nv, 3, ))
arr.setflags(write=False)
return arr
@dof_solimp.setter
def dof_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_solimp, val_ptr, self.nv*3 * sizeof(c_double))
@property
def dof_frictionloss(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_frictionloss, dtype=np.double, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_frictionloss.setter
def dof_frictionloss(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_frictionloss, val_ptr, self.nv*1 * sizeof(c_double))
@property
def dof_armature(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_armature, dtype=np.double, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_armature.setter
def dof_armature(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_armature, val_ptr, self.nv*1 * sizeof(c_double))
@property
def dof_damping(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_damping, dtype=np.double, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_damping.setter
def dof_damping(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_damping, val_ptr, self.nv*1 * sizeof(c_double))
@property
def dof_invweight0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.dof_invweight0, dtype=np.double, count=(self.nv*1)), (self.nv, 1, ))
arr.setflags(write=False)
return arr
@dof_invweight0.setter
def dof_invweight0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.dof_invweight0, val_ptr, self.nv*1 * sizeof(c_double))
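    # Geometry (geom_*) fields, sized by self.ngeom; ids and flags are C ints, sizes
    # and poses are doubles, and geom_rgba is stored as single-precision floats.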
@property
def geom_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_type, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_type.setter
def geom_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_type, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_contype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_contype, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_contype.setter
def geom_contype(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_contype, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_conaffinity(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_conaffinity, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_conaffinity.setter
def geom_conaffinity(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_conaffinity, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_condim(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_condim, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_condim.setter
def geom_condim(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_condim, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_bodyid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_bodyid.setter
def geom_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_bodyid, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_dataid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_dataid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_dataid.setter
def geom_dataid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_dataid, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_matid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_matid, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_matid.setter
def geom_matid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_matid, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_group(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_group, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_group.setter
def geom_group(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.geom_group, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def geom_solmix(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solmix, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_solmix.setter
def geom_solmix(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_solmix, val_ptr, self.ngeom*1 * sizeof(c_double))
@property
def geom_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solref, dtype=np.double, count=(self.ngeom*2)), (self.ngeom, 2, ))
arr.setflags(write=False)
return arr
@geom_solref.setter
def geom_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_solref, val_ptr, self.ngeom*2 * sizeof(c_double))
@property
def geom_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_solimp, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, ))
arr.setflags(write=False)
return arr
@geom_solimp.setter
def geom_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_solimp, val_ptr, self.ngeom*3 * sizeof(c_double))
@property
def geom_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_size, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, ))
arr.setflags(write=False)
return arr
@geom_size.setter
def geom_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_size, val_ptr, self.ngeom*3 * sizeof(c_double))
@property
def geom_rbound(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_rbound, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_rbound.setter
def geom_rbound(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_rbound, val_ptr, self.ngeom*1 * sizeof(c_double))
@property
def geom_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_pos, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, ))
arr.setflags(write=False)
return arr
@geom_pos.setter
def geom_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_pos, val_ptr, self.ngeom*3 * sizeof(c_double))
@property
def geom_quat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_quat, dtype=np.double, count=(self.ngeom*4)), (self.ngeom, 4, ))
arr.setflags(write=False)
return arr
@geom_quat.setter
def geom_quat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_quat, val_ptr, self.ngeom*4 * sizeof(c_double))
@property
def geom_friction(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_friction, dtype=np.double, count=(self.ngeom*3)), (self.ngeom, 3, ))
arr.setflags(write=False)
return arr
@geom_friction.setter
def geom_friction(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_friction, val_ptr, self.ngeom*3 * sizeof(c_double))
@property
def geom_margin(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_margin, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_margin.setter
def geom_margin(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_margin, val_ptr, self.ngeom*1 * sizeof(c_double))
@property
def geom_gap(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_gap, dtype=np.double, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@geom_gap.setter
def geom_gap(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_gap, val_ptr, self.ngeom*1 * sizeof(c_double))
@property
def geom_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_user, dtype=np.double, count=(self.ngeom*self.nuser_geom)), (self.ngeom, self.nuser_geom, ))
arr.setflags(write=False)
return arr
@geom_user.setter
def geom_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.geom_user, val_ptr, self.ngeom*self.nuser_geom * sizeof(c_double))
@property
def geom_rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.geom_rgba, dtype=np.float, count=(self.ngeom*4)), (self.ngeom, 4, ))
arr.setflags(write=False)
return arr
@geom_rgba.setter
def geom_rgba(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.geom_rgba, val_ptr, self.ngeom*4 * sizeof(c_float))
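    # Site (site_*) fields, sized by self.nsite.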
@property
def site_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_type, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, ))
arr.setflags(write=False)
return arr
@site_type.setter
def site_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.site_type, val_ptr, self.nsite*1 * sizeof(c_int))
@property
def site_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_bodyid, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, ))
arr.setflags(write=False)
return arr
@site_bodyid.setter
def site_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.site_bodyid, val_ptr, self.nsite*1 * sizeof(c_int))
@property
def site_matid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_matid, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, ))
arr.setflags(write=False)
return arr
@site_matid.setter
def site_matid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.site_matid, val_ptr, self.nsite*1 * sizeof(c_int))
@property
def site_group(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_group, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, ))
arr.setflags(write=False)
return arr
@site_group.setter
def site_group(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.site_group, val_ptr, self.nsite*1 * sizeof(c_int))
@property
def site_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_size, dtype=np.double, count=(self.nsite*3)), (self.nsite, 3, ))
arr.setflags(write=False)
return arr
@site_size.setter
def site_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_size, val_ptr, self.nsite*3 * sizeof(c_double))
@property
def site_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_pos, dtype=np.double, count=(self.nsite*3)), (self.nsite, 3, ))
arr.setflags(write=False)
return arr
@site_pos.setter
def site_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_pos, val_ptr, self.nsite*3 * sizeof(c_double))
@property
def site_quat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_quat, dtype=np.double, count=(self.nsite*4)), (self.nsite, 4, ))
arr.setflags(write=False)
return arr
@site_quat.setter
def site_quat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_quat, val_ptr, self.nsite*4 * sizeof(c_double))
@property
def site_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_user, dtype=np.double, count=(self.nsite*self.nuser_site)), (self.nsite, self.nuser_site, ))
arr.setflags(write=False)
return arr
@site_user.setter
def site_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.site_user, val_ptr, self.nsite*self.nuser_site * sizeof(c_double))
@property
def site_rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.site_rgba, dtype=np.float, count=(self.nsite*4)), (self.nsite, 4, ))
arr.setflags(write=False)
return arr
@site_rgba.setter
def site_rgba(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.site_rgba, val_ptr, self.nsite*4 * sizeof(c_float))
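    # Camera (cam_*) fields, sized by self.ncam.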
@property
def cam_mode(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_mode, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@cam_mode.setter
def cam_mode(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.cam_mode, val_ptr, self.ncam*1 * sizeof(c_int))
@property
def cam_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_bodyid, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@cam_bodyid.setter
def cam_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.cam_bodyid, val_ptr, self.ncam*1 * sizeof(c_int))
@property
def cam_targetbodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_targetbodyid, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@cam_targetbodyid.setter
def cam_targetbodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.cam_targetbodyid, val_ptr, self.ncam*1 * sizeof(c_int))
@property
def cam_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_pos, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, ))
arr.setflags(write=False)
return arr
@cam_pos.setter
def cam_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_pos, val_ptr, self.ncam*3 * sizeof(c_double))
@property
def cam_quat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_quat, dtype=np.double, count=(self.ncam*4)), (self.ncam, 4, ))
arr.setflags(write=False)
return arr
@cam_quat.setter
def cam_quat(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_quat, val_ptr, self.ncam*4 * sizeof(c_double))
@property
def cam_poscom0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_poscom0, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, ))
arr.setflags(write=False)
return arr
@cam_poscom0.setter
def cam_poscom0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_poscom0, val_ptr, self.ncam*3 * sizeof(c_double))
@property
def cam_pos0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_pos0, dtype=np.double, count=(self.ncam*3)), (self.ncam, 3, ))
arr.setflags(write=False)
return arr
@cam_pos0.setter
def cam_pos0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_pos0, val_ptr, self.ncam*3 * sizeof(c_double))
@property
def cam_mat0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_mat0, dtype=np.double, count=(self.ncam*9)), (self.ncam, 9, ))
arr.setflags(write=False)
return arr
@cam_mat0.setter
def cam_mat0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_mat0, val_ptr, self.ncam*9 * sizeof(c_double))
@property
def cam_fovy(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_fovy, dtype=np.double, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@cam_fovy.setter
def cam_fovy(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_fovy, val_ptr, self.ncam*1 * sizeof(c_double))
@property
def cam_ipd(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.cam_ipd, dtype=np.double, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@cam_ipd.setter
def cam_ipd(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.cam_ipd, val_ptr, self.ncam*1 * sizeof(c_double))
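    # Light (light_*) fields, sized by self.nlight; boolean flags are unsigned bytes,
    # colors and attenuation are single-precision floats.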
@property
def light_mode(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_mode, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_mode.setter
def light_mode(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.light_mode, val_ptr, self.nlight*1 * sizeof(c_int))
@property
def light_bodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_bodyid, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_bodyid.setter
def light_bodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.light_bodyid, val_ptr, self.nlight*1 * sizeof(c_int))
@property
def light_targetbodyid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_targetbodyid, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_targetbodyid.setter
def light_targetbodyid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.light_targetbodyid, val_ptr, self.nlight*1 * sizeof(c_int))
@property
def light_directional(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_directional, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_directional.setter
def light_directional(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.light_directional, val_ptr, self.nlight*1 * sizeof(c_ubyte))
@property
def light_castshadow(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_castshadow, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_castshadow.setter
def light_castshadow(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.light_castshadow, val_ptr, self.nlight*1 * sizeof(c_ubyte))
@property
def light_active(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_active, dtype=np.uint8, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_active.setter
def light_active(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.light_active, val_ptr, self.nlight*1 * sizeof(c_ubyte))
@property
def light_pos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_pos, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_pos.setter
def light_pos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_pos, val_ptr, self.nlight*3 * sizeof(c_double))
@property
def light_dir(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_dir, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_dir.setter
def light_dir(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_dir, val_ptr, self.nlight*3 * sizeof(c_double))
@property
def light_poscom0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_poscom0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_poscom0.setter
def light_poscom0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_poscom0, val_ptr, self.nlight*3 * sizeof(c_double))
@property
def light_pos0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_pos0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_pos0.setter
def light_pos0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_pos0, val_ptr, self.nlight*3 * sizeof(c_double))
@property
def light_dir0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_dir0, dtype=np.double, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_dir0.setter
def light_dir0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.light_dir0, val_ptr, self.nlight*3 * sizeof(c_double))
@property
def light_attenuation(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_attenuation, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_attenuation.setter
def light_attenuation(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_attenuation, val_ptr, self.nlight*3 * sizeof(c_float))
@property
def light_cutoff(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_cutoff, dtype=np.float, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_cutoff.setter
def light_cutoff(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_cutoff, val_ptr, self.nlight*1 * sizeof(c_float))
@property
def light_exponent(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_exponent, dtype=np.float, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@light_exponent.setter
def light_exponent(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_exponent, val_ptr, self.nlight*1 * sizeof(c_float))
@property
def light_ambient(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_ambient, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_ambient.setter
def light_ambient(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_ambient, val_ptr, self.nlight*3 * sizeof(c_float))
@property
def light_diffuse(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_diffuse, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_diffuse.setter
def light_diffuse(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_diffuse, val_ptr, self.nlight*3 * sizeof(c_float))
@property
def light_specular(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.light_specular, dtype=np.float, count=(self.nlight*3)), (self.nlight, 3, ))
arr.setflags(write=False)
return arr
@light_specular.setter
def light_specular(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.light_specular, val_ptr, self.nlight*3 * sizeof(c_float))
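    # Mesh (mesh_*) fields: per-mesh offsets/counts (self.nmesh rows) plus flat vertex,
    # normal, face and graph buffers (self.nmeshvert / self.nmeshface / self.nmeshgraph rows).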
@property
def mesh_faceadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_faceadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@mesh_faceadr.setter
def mesh_faceadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_faceadr, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def mesh_facenum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_facenum, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@mesh_facenum.setter
def mesh_facenum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_facenum, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def mesh_vertadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vertadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@mesh_vertadr.setter
def mesh_vertadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_vertadr, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def mesh_vertnum(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vertnum, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@mesh_vertnum.setter
def mesh_vertnum(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_vertnum, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def mesh_graphadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_graphadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@mesh_graphadr.setter
def mesh_graphadr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_graphadr, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def mesh_vert(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_vert, dtype=np.float, count=(self.nmeshvert*3)), (self.nmeshvert, 3, ))
arr.setflags(write=False)
return arr
@mesh_vert.setter
def mesh_vert(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mesh_vert, val_ptr, self.nmeshvert*3 * sizeof(c_float))
@property
def mesh_normal(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_normal, dtype=np.float, count=(self.nmeshvert*3)), (self.nmeshvert, 3, ))
arr.setflags(write=False)
return arr
@mesh_normal.setter
def mesh_normal(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mesh_normal, val_ptr, self.nmeshvert*3 * sizeof(c_float))
@property
def mesh_face(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_face, dtype=np.int, count=(self.nmeshface*3)), (self.nmeshface, 3, ))
arr.setflags(write=False)
return arr
@mesh_face.setter
def mesh_face(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_face, val_ptr, self.nmeshface*3 * sizeof(c_int))
@property
def mesh_graph(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mesh_graph, dtype=np.int, count=(self.nmeshgraph*1)), (self.nmeshgraph, 1, ))
arr.setflags(write=False)
return arr
@mesh_graph.setter
def mesh_graph(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mesh_graph, val_ptr, self.nmeshgraph*1 * sizeof(c_int))
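    # Height-field (hfield_*) fields, sized by self.nhfield, with the raw elevation
    # samples in hfield_data (self.nhfielddata rows).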
@property
def hfield_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_size, dtype=np.double, count=(self.nhfield*4)), (self.nhfield, 4, ))
arr.setflags(write=False)
return arr
@hfield_size.setter
def hfield_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.hfield_size, val_ptr, self.nhfield*4 * sizeof(c_double))
@property
def hfield_nrow(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_nrow, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, ))
arr.setflags(write=False)
return arr
@hfield_nrow.setter
def hfield_nrow(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.hfield_nrow, val_ptr, self.nhfield*1 * sizeof(c_int))
@property
def hfield_ncol(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_ncol, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, ))
arr.setflags(write=False)
return arr
@hfield_ncol.setter
def hfield_ncol(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.hfield_ncol, val_ptr, self.nhfield*1 * sizeof(c_int))
@property
def hfield_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_adr, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, ))
arr.setflags(write=False)
return arr
@hfield_adr.setter
def hfield_adr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.hfield_adr, val_ptr, self.nhfield*1 * sizeof(c_int))
@property
def hfield_data(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.hfield_data, dtype=np.float, count=(self.nhfielddata*1)), (self.nhfielddata, 1, ))
arr.setflags(write=False)
return arr
@hfield_data.setter
def hfield_data(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.hfield_data, val_ptr, self.nhfielddata*1 * sizeof(c_float))
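    # Texture (tex_*) fields, sized by self.ntex, with packed texture bytes in tex_rgb
    # (self.ntexdata rows of unsigned bytes).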
@property
def tex_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tex_type, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, ))
arr.setflags(write=False)
return arr
@tex_type.setter
def tex_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tex_type, val_ptr, self.ntex*1 * sizeof(c_int))
@property
def tex_height(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tex_height, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, ))
arr.setflags(write=False)
return arr
@tex_height.setter
def tex_height(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tex_height, val_ptr, self.ntex*1 * sizeof(c_int))
@property
def tex_width(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tex_width, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, ))
arr.setflags(write=False)
return arr
@tex_width.setter
def tex_width(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tex_width, val_ptr, self.ntex*1 * sizeof(c_int))
@property
def tex_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tex_adr, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, ))
arr.setflags(write=False)
return arr
@tex_adr.setter
def tex_adr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tex_adr, val_ptr, self.ntex*1 * sizeof(c_int))
@property
def tex_rgb(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tex_rgb, dtype=np.uint8, count=(self.ntexdata*1)), (self.ntexdata, 1, ))
arr.setflags(write=False)
return arr
@tex_rgb.setter
def tex_rgb(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.tex_rgb, val_ptr, self.ntexdata*1 * sizeof(c_ubyte))
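    # Material (mat_*) fields, sized by self.nmat.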
@property
def mat_texid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texid, dtype=np.int, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_texid.setter
def mat_texid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.mat_texid, val_ptr, self.nmat*1 * sizeof(c_int))
@property
def mat_texuniform(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texuniform, dtype=np.uint8, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_texuniform.setter
def mat_texuniform(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.mat_texuniform, val_ptr, self.nmat*1 * sizeof(c_ubyte))
@property
def mat_texrepeat(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_texrepeat, dtype=np.float, count=(self.nmat*2)), (self.nmat, 2, ))
arr.setflags(write=False)
return arr
@mat_texrepeat.setter
def mat_texrepeat(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_texrepeat, val_ptr, self.nmat*2 * sizeof(c_float))
@property
def mat_emission(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_emission, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_emission.setter
def mat_emission(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_emission, val_ptr, self.nmat*1 * sizeof(c_float))
@property
def mat_specular(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_specular, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_specular.setter
def mat_specular(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_specular, val_ptr, self.nmat*1 * sizeof(c_float))
@property
def mat_shininess(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_shininess, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_shininess.setter
def mat_shininess(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_shininess, val_ptr, self.nmat*1 * sizeof(c_float))
@property
def mat_reflectance(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_reflectance, dtype=np.float, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@mat_reflectance.setter
def mat_reflectance(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_reflectance, val_ptr, self.nmat*1 * sizeof(c_float))
@property
def mat_rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.mat_rgba, dtype=np.float, count=(self.nmat*4)), (self.nmat, 4, ))
arr.setflags(write=False)
return arr
@mat_rgba.setter
def mat_rgba(self, value):
        val_ptr = np.array(value, dtype=np.float32).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.mat_rgba, val_ptr, self.nmat*4 * sizeof(c_float))
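    # Predefined contact-pair (pair_*) and excluded-pair fields, sized by self.npair
    # and self.nexclude respectively.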
@property
def pair_dim(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_dim, dtype=np.int, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_dim.setter
def pair_dim(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.pair_dim, val_ptr, self.npair*1 * sizeof(c_int))
@property
def pair_geom1(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_geom1, dtype=np.int, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_geom1.setter
def pair_geom1(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.pair_geom1, val_ptr, self.npair*1 * sizeof(c_int))
@property
def pair_geom2(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_geom2, dtype=np.int, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_geom2.setter
def pair_geom2(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.pair_geom2, val_ptr, self.npair*1 * sizeof(c_int))
@property
def pair_signature(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_signature, dtype=np.int, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_signature.setter
def pair_signature(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.pair_signature, val_ptr, self.npair*1 * sizeof(c_int))
@property
def pair_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_solref, dtype=np.double, count=(self.npair*2)), (self.npair, 2, ))
arr.setflags(write=False)
return arr
@pair_solref.setter
def pair_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pair_solref, val_ptr, self.npair*2 * sizeof(c_double))
@property
def pair_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_solimp, dtype=np.double, count=(self.npair*3)), (self.npair, 3, ))
arr.setflags(write=False)
return arr
@pair_solimp.setter
def pair_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pair_solimp, val_ptr, self.npair*3 * sizeof(c_double))
@property
def pair_margin(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_margin, dtype=np.double, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_margin.setter
def pair_margin(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pair_margin, val_ptr, self.npair*1 * sizeof(c_double))
@property
def pair_gap(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_gap, dtype=np.double, count=(self.npair*1)), (self.npair, 1, ))
arr.setflags(write=False)
return arr
@pair_gap.setter
def pair_gap(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pair_gap, val_ptr, self.npair*1 * sizeof(c_double))
@property
def pair_friction(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.pair_friction, dtype=np.double, count=(self.npair*5)), (self.npair, 5, ))
arr.setflags(write=False)
return arr
@pair_friction.setter
def pair_friction(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.pair_friction, val_ptr, self.npair*5 * sizeof(c_double))
@property
def exclude_signature(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.exclude_signature, dtype=np.int, count=(self.nexclude*1)), (self.nexclude, 1, ))
arr.setflags(write=False)
return arr
@exclude_signature.setter
def exclude_signature(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.exclude_signature, val_ptr, self.nexclude*1 * sizeof(c_int))
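    # Equality-constraint (eq_*) fields, sized by self.neq.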
@property
def eq_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_type, dtype=np.int, count=(self.neq*1)), (self.neq, 1, ))
arr.setflags(write=False)
return arr
@eq_type.setter
def eq_type(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.eq_type, val_ptr, self.neq*1 * sizeof(c_int))
@property
def eq_obj1id(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_obj1id, dtype=np.int, count=(self.neq*1)), (self.neq, 1, ))
arr.setflags(write=False)
return arr
@eq_obj1id.setter
def eq_obj1id(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.eq_obj1id, val_ptr, self.neq*1 * sizeof(c_int))
@property
def eq_obj2id(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_obj2id, dtype=np.int, count=(self.neq*1)), (self.neq, 1, ))
arr.setflags(write=False)
return arr
@eq_obj2id.setter
def eq_obj2id(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.eq_obj2id, val_ptr, self.neq*1 * sizeof(c_int))
@property
def eq_active(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_active, dtype=np.uint8, count=(self.neq*1)), (self.neq, 1, ))
arr.setflags(write=False)
return arr
@eq_active.setter
def eq_active(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.eq_active, val_ptr, self.neq*1 * sizeof(c_ubyte))
@property
def eq_solref(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_solref, dtype=np.double, count=(self.neq*2)), (self.neq, 2, ))
arr.setflags(write=False)
return arr
@eq_solref.setter
def eq_solref(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.eq_solref, val_ptr, self.neq*2 * sizeof(c_double))
@property
def eq_solimp(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_solimp, dtype=np.double, count=(self.neq*3)), (self.neq, 3, ))
arr.setflags(write=False)
return arr
@eq_solimp.setter
def eq_solimp(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.eq_solimp, val_ptr, self.neq*3 * sizeof(c_double))
@property
def eq_data(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.eq_data, dtype=np.double, count=(self.neq*7)), (self.neq, 7, ))
arr.setflags(write=False)
return arr
@eq_data.setter
def eq_data(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.eq_data, val_ptr, self.neq*7 * sizeof(c_double))
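    # Tendon (tendon_*) fields, sized by self.ntendon.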
@property
def tendon_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_adr, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_adr.setter
def tendon_adr(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tendon_adr, val_ptr, self.ntendon*1 * sizeof(c_int))
@property
def tendon_num(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_num, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_num.setter
def tendon_num(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tendon_num, val_ptr, self.ntendon*1 * sizeof(c_int))
@property
def tendon_matid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_matid, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_matid.setter
def tendon_matid(self, value):
        val_ptr = np.array(value, dtype=np.intc).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tendon_matid, val_ptr, self.ntendon*1 * sizeof(c_int))
@property
def tendon_limited(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_limited, dtype=np.uint8, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_limited.setter
def tendon_limited(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.tendon_limited, val_ptr, self.ntendon*1 * sizeof(c_ubyte))
@property
def tendon_frictional(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_frictional, dtype=np.uint8, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_frictional.setter
def tendon_frictional(self, value):
        val_ptr = np.array(value, dtype=np.uint8).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.tendon_frictional, val_ptr, self.ntendon*1 * sizeof(c_ubyte))
@property
def tendon_width(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_width, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_width.setter
def tendon_width(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_width, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_solref_lim(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solref_lim, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, ))
arr.setflags(write=False)
return arr
@tendon_solref_lim.setter
def tendon_solref_lim(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_solref_lim, val_ptr, self.ntendon*2 * sizeof(c_double))
@property
def tendon_solimp_lim(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solimp_lim, dtype=np.double, count=(self.ntendon*3)), (self.ntendon, 3, ))
arr.setflags(write=False)
return arr
@tendon_solimp_lim.setter
def tendon_solimp_lim(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_solimp_lim, val_ptr, self.ntendon*3 * sizeof(c_double))
@property
def tendon_solref_fri(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solref_fri, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, ))
arr.setflags(write=False)
return arr
@tendon_solref_fri.setter
def tendon_solref_fri(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_solref_fri, val_ptr, self.ntendon*2 * sizeof(c_double))
@property
def tendon_solimp_fri(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_solimp_fri, dtype=np.double, count=(self.ntendon*3)), (self.ntendon, 3, ))
arr.setflags(write=False)
return arr
@tendon_solimp_fri.setter
def tendon_solimp_fri(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_solimp_fri, val_ptr, self.ntendon*3 * sizeof(c_double))
@property
def tendon_range(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_range, dtype=np.double, count=(self.ntendon*2)), (self.ntendon, 2, ))
arr.setflags(write=False)
return arr
@tendon_range.setter
def tendon_range(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_range, val_ptr, self.ntendon*2 * sizeof(c_double))
@property
def tendon_margin(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_margin, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_margin.setter
def tendon_margin(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_margin, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_stiffness(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_stiffness, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_stiffness.setter
def tendon_stiffness(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_stiffness, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_damping(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_damping, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_damping.setter
def tendon_damping(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_damping, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_frictionloss(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_frictionloss, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_frictionloss.setter
def tendon_frictionloss(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_frictionloss, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_lengthspring(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_lengthspring, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_lengthspring.setter
def tendon_lengthspring(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_lengthspring, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_length0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_length0, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_length0.setter
def tendon_length0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_length0, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_invweight0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_invweight0, dtype=np.double, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@tendon_invweight0.setter
def tendon_invweight0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_invweight0, val_ptr, self.ntendon*1 * sizeof(c_double))
@property
def tendon_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_user, dtype=np.double, count=(self.ntendon*self.nuser_tendon)), (self.ntendon, self.nuser_tendon, ))
arr.setflags(write=False)
return arr
@tendon_user.setter
def tendon_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tendon_user, val_ptr, self.ntendon*self.nuser_tendon * sizeof(c_double))
@property
def tendon_rgba(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tendon_rgba, dtype=np.float, count=(self.ntendon*4)), (self.ntendon, 4, ))
arr.setflags(write=False)
return arr
@tendon_rgba.setter
def tendon_rgba(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
memmove(self._wrapped.contents.tendon_rgba, val_ptr, self.ntendon*4 * sizeof(c_float))
@property
def wrap_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_type, dtype=np.int, count=(self.nwrap*1)), (self.nwrap, 1, ))
arr.setflags(write=False)
return arr
@wrap_type.setter
def wrap_type(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.wrap_type, val_ptr, self.nwrap*1 * sizeof(c_int))
@property
def wrap_objid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_objid, dtype=np.int, count=(self.nwrap*1)), (self.nwrap, 1, ))
arr.setflags(write=False)
return arr
@wrap_objid.setter
def wrap_objid(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.wrap_objid, val_ptr, self.nwrap*1 * sizeof(c_int))
@property
def wrap_prm(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.wrap_prm, dtype=np.double, count=(self.nwrap*1)), (self.nwrap, 1, ))
arr.setflags(write=False)
return arr
@wrap_prm.setter
def wrap_prm(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.wrap_prm, val_ptr, self.nwrap*1 * sizeof(c_double))
@property
def actuator_trntype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_trntype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_trntype.setter
def actuator_trntype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.actuator_trntype, val_ptr, self.nu*1 * sizeof(c_int))
@property
def actuator_dyntype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_dyntype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_dyntype.setter
def actuator_dyntype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.actuator_dyntype, val_ptr, self.nu*1 * sizeof(c_int))
@property
def actuator_gaintype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gaintype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_gaintype.setter
def actuator_gaintype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.actuator_gaintype, val_ptr, self.nu*1 * sizeof(c_int))
@property
def actuator_biastype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_biastype, dtype=np.int, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_biastype.setter
def actuator_biastype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.actuator_biastype, val_ptr, self.nu*1 * sizeof(c_int))
@property
def actuator_trnid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_trnid, dtype=np.int, count=(self.nu*2)), (self.nu, 2, ))
arr.setflags(write=False)
return arr
@actuator_trnid.setter
def actuator_trnid(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.actuator_trnid, val_ptr, self.nu*2 * sizeof(c_int))
@property
def actuator_ctrllimited(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_ctrllimited, dtype=np.uint8, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_ctrllimited.setter
def actuator_ctrllimited(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.actuator_ctrllimited, val_ptr, self.nu*1 * sizeof(c_ubyte))
@property
def actuator_forcelimited(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_forcelimited, dtype=np.uint8, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_forcelimited.setter
def actuator_forcelimited(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_ubyte))
memmove(self._wrapped.contents.actuator_forcelimited, val_ptr, self.nu*1 * sizeof(c_ubyte))
@property
def actuator_dynprm(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_dynprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, ))
arr.setflags(write=False)
return arr
@actuator_dynprm.setter
def actuator_dynprm(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_dynprm, val_ptr, self.nu*3 * sizeof(c_double))
@property
def actuator_gainprm(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gainprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, ))
arr.setflags(write=False)
return arr
@actuator_gainprm.setter
def actuator_gainprm(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_gainprm, val_ptr, self.nu*3 * sizeof(c_double))
@property
def actuator_biasprm(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_biasprm, dtype=np.double, count=(self.nu*3)), (self.nu, 3, ))
arr.setflags(write=False)
return arr
@actuator_biasprm.setter
def actuator_biasprm(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_biasprm, val_ptr, self.nu*3 * sizeof(c_double))
@property
def actuator_ctrlrange(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_ctrlrange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, ))
arr.setflags(write=False)
return arr
@actuator_ctrlrange.setter
def actuator_ctrlrange(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_ctrlrange, val_ptr, self.nu*2 * sizeof(c_double))
@property
def actuator_forcerange(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_forcerange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, ))
arr.setflags(write=False)
return arr
@actuator_forcerange.setter
def actuator_forcerange(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_forcerange, val_ptr, self.nu*2 * sizeof(c_double))
@property
def actuator_gear(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_gear, dtype=np.double, count=(self.nu*6)), (self.nu, 6, ))
arr.setflags(write=False)
return arr
@actuator_gear.setter
def actuator_gear(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_gear, val_ptr, self.nu*6 * sizeof(c_double))
@property
def actuator_cranklength(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_cranklength, dtype=np.double, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_cranklength.setter
def actuator_cranklength(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_cranklength, val_ptr, self.nu*1 * sizeof(c_double))
@property
def actuator_invweight0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_invweight0, dtype=np.double, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_invweight0.setter
def actuator_invweight0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_invweight0, val_ptr, self.nu*1 * sizeof(c_double))
@property
def actuator_length0(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_length0, dtype=np.double, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@actuator_length0.setter
def actuator_length0(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_length0, val_ptr, self.nu*1 * sizeof(c_double))
@property
def actuator_lengthrange(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_lengthrange, dtype=np.double, count=(self.nu*2)), (self.nu, 2, ))
arr.setflags(write=False)
return arr
@actuator_lengthrange.setter
def actuator_lengthrange(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_lengthrange, val_ptr, self.nu*2 * sizeof(c_double))
@property
def actuator_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.actuator_user, dtype=np.double, count=(self.nu*self.nuser_actuator)), (self.nu, self.nuser_actuator, ))
arr.setflags(write=False)
return arr
@actuator_user.setter
def actuator_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.actuator_user, val_ptr, self.nu*self.nuser_actuator * sizeof(c_double))
@property
def sensor_type(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_type, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_type.setter
def sensor_type(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_type, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_datatype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_datatype, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_datatype.setter
def sensor_datatype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_datatype, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_needstage(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_needstage, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_needstage.setter
def sensor_needstage(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_needstage, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_objtype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_objtype, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_objtype.setter
def sensor_objtype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_objtype, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_objid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_objid, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_objid.setter
def sensor_objid(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_objid, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_dim(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_dim, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_dim.setter
def sensor_dim(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_dim, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_adr, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_adr.setter
def sensor_adr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.sensor_adr, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def sensor_noise(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_noise, dtype=np.double, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@sensor_noise.setter
def sensor_noise(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.sensor_noise, val_ptr, self.nsensor*1 * sizeof(c_double))
@property
def sensor_user(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.sensor_user, dtype=np.double, count=(self.nsensor*self.nuser_sensor)), (self.nsensor, self.nuser_sensor, ))
arr.setflags(write=False)
return arr
@sensor_user.setter
def sensor_user(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.sensor_user, val_ptr, self.nsensor*self.nuser_sensor * sizeof(c_double))
@property
def numeric_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_adr, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, ))
arr.setflags(write=False)
return arr
@numeric_adr.setter
def numeric_adr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.numeric_adr, val_ptr, self.nnumeric*1 * sizeof(c_int))
@property
def numeric_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_size, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, ))
arr.setflags(write=False)
return arr
@numeric_size.setter
def numeric_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.numeric_size, val_ptr, self.nnumeric*1 * sizeof(c_int))
@property
def numeric_data(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.numeric_data, dtype=np.double, count=(self.nnumericdata*1)), (self.nnumericdata, 1, ))
arr.setflags(write=False)
return arr
@numeric_data.setter
def numeric_data(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.numeric_data, val_ptr, self.nnumericdata*1 * sizeof(c_double))
@property
def text_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.text_adr, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, ))
arr.setflags(write=False)
return arr
@text_adr.setter
def text_adr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.text_adr, val_ptr, self.ntext*1 * sizeof(c_int))
@property
def text_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.text_size, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, ))
arr.setflags(write=False)
return arr
@text_size.setter
def text_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.text_size, val_ptr, self.ntext*1 * sizeof(c_int))
@property
def text_data(self):
return self._wrapped.contents.text_data
@property
def tuple_adr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_adr, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, ))
arr.setflags(write=False)
return arr
@tuple_adr.setter
def tuple_adr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tuple_adr, val_ptr, self.ntuple*1 * sizeof(c_int))
@property
def tuple_size(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_size, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, ))
arr.setflags(write=False)
return arr
@tuple_size.setter
def tuple_size(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tuple_size, val_ptr, self.ntuple*1 * sizeof(c_int))
@property
def tuple_objtype(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objtype, dtype=np.int, count=(self.ntupledata*1)), (self.ntupledata, 1, ))
arr.setflags(write=False)
return arr
@tuple_objtype.setter
def tuple_objtype(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tuple_objtype, val_ptr, self.ntupledata*1 * sizeof(c_int))
@property
def tuple_objid(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objid, dtype=np.int, count=(self.ntupledata*1)), (self.ntupledata, 1, ))
arr.setflags(write=False)
return arr
@tuple_objid.setter
def tuple_objid(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.tuple_objid, val_ptr, self.ntupledata*1 * sizeof(c_int))
@property
def tuple_objprm(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.tuple_objprm, dtype=np.double, count=(self.ntupledata*1)), (self.ntupledata, 1, ))
arr.setflags(write=False)
return arr
@tuple_objprm.setter
def tuple_objprm(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.tuple_objprm, val_ptr, self.ntupledata*1 * sizeof(c_double))
@property
def key_time(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.key_time, dtype=np.double, count=(self.nkey*1)), (self.nkey, 1, ))
arr.setflags(write=False)
return arr
@key_time.setter
def key_time(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.key_time, val_ptr, self.nkey*1 * sizeof(c_double))
@property
def key_qpos(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.key_qpos, dtype=np.double, count=(self.nkey*self.nq)), (self.nkey, self.nq, ))
arr.setflags(write=False)
return arr
@key_qpos.setter
def key_qpos(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.key_qpos, val_ptr, self.nkey*self.nq * sizeof(c_double))
@property
def key_qvel(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.key_qvel, dtype=np.double, count=(self.nkey*self.nv)), (self.nkey, self.nv, ))
arr.setflags(write=False)
return arr
@key_qvel.setter
def key_qvel(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.key_qvel, val_ptr, self.nkey*self.nv * sizeof(c_double))
@property
def key_act(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.key_act, dtype=np.double, count=(self.nkey*self.na)), (self.nkey, self.na, ))
arr.setflags(write=False)
return arr
@key_act.setter
def key_act(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_double))
memmove(self._wrapped.contents.key_act, val_ptr, self.nkey*self.na * sizeof(c_double))
@property
def name_bodyadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_bodyadr, dtype=np.int, count=(self.nbody*1)), (self.nbody, 1, ))
arr.setflags(write=False)
return arr
@name_bodyadr.setter
def name_bodyadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_bodyadr, val_ptr, self.nbody*1 * sizeof(c_int))
@property
def name_jntadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_jntadr, dtype=np.int, count=(self.njnt*1)), (self.njnt, 1, ))
arr.setflags(write=False)
return arr
@name_jntadr.setter
def name_jntadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_jntadr, val_ptr, self.njnt*1 * sizeof(c_int))
@property
def name_geomadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_geomadr, dtype=np.int, count=(self.ngeom*1)), (self.ngeom, 1, ))
arr.setflags(write=False)
return arr
@name_geomadr.setter
def name_geomadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_geomadr, val_ptr, self.ngeom*1 * sizeof(c_int))
@property
def name_siteadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_siteadr, dtype=np.int, count=(self.nsite*1)), (self.nsite, 1, ))
arr.setflags(write=False)
return arr
@name_siteadr.setter
def name_siteadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_siteadr, val_ptr, self.nsite*1 * sizeof(c_int))
@property
def name_camadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_camadr, dtype=np.int, count=(self.ncam*1)), (self.ncam, 1, ))
arr.setflags(write=False)
return arr
@name_camadr.setter
def name_camadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_camadr, val_ptr, self.ncam*1 * sizeof(c_int))
@property
def name_lightadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_lightadr, dtype=np.int, count=(self.nlight*1)), (self.nlight, 1, ))
arr.setflags(write=False)
return arr
@name_lightadr.setter
def name_lightadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_lightadr, val_ptr, self.nlight*1 * sizeof(c_int))
@property
def name_meshadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_meshadr, dtype=np.int, count=(self.nmesh*1)), (self.nmesh, 1, ))
arr.setflags(write=False)
return arr
@name_meshadr.setter
def name_meshadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_meshadr, val_ptr, self.nmesh*1 * sizeof(c_int))
@property
def name_hfieldadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_hfieldadr, dtype=np.int, count=(self.nhfield*1)), (self.nhfield, 1, ))
arr.setflags(write=False)
return arr
@name_hfieldadr.setter
def name_hfieldadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_hfieldadr, val_ptr, self.nhfield*1 * sizeof(c_int))
@property
def name_texadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_texadr, dtype=np.int, count=(self.ntex*1)), (self.ntex, 1, ))
arr.setflags(write=False)
return arr
@name_texadr.setter
def name_texadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_texadr, val_ptr, self.ntex*1 * sizeof(c_int))
@property
def name_matadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_matadr, dtype=np.int, count=(self.nmat*1)), (self.nmat, 1, ))
arr.setflags(write=False)
return arr
@name_matadr.setter
def name_matadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_matadr, val_ptr, self.nmat*1 * sizeof(c_int))
@property
def name_eqadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_eqadr, dtype=np.int, count=(self.neq*1)), (self.neq, 1, ))
arr.setflags(write=False)
return arr
@name_eqadr.setter
def name_eqadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_eqadr, val_ptr, self.neq*1 * sizeof(c_int))
@property
def name_tendonadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_tendonadr, dtype=np.int, count=(self.ntendon*1)), (self.ntendon, 1, ))
arr.setflags(write=False)
return arr
@name_tendonadr.setter
def name_tendonadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_tendonadr, val_ptr, self.ntendon*1 * sizeof(c_int))
@property
def name_actuatoradr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_actuatoradr, dtype=np.int, count=(self.nu*1)), (self.nu, 1, ))
arr.setflags(write=False)
return arr
@name_actuatoradr.setter
def name_actuatoradr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_actuatoradr, val_ptr, self.nu*1 * sizeof(c_int))
@property
def name_sensoradr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_sensoradr, dtype=np.int, count=(self.nsensor*1)), (self.nsensor, 1, ))
arr.setflags(write=False)
return arr
@name_sensoradr.setter
def name_sensoradr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_sensoradr, val_ptr, self.nsensor*1 * sizeof(c_int))
@property
def name_numericadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_numericadr, dtype=np.int, count=(self.nnumeric*1)), (self.nnumeric, 1, ))
arr.setflags(write=False)
return arr
@name_numericadr.setter
def name_numericadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_numericadr, val_ptr, self.nnumeric*1 * sizeof(c_int))
@property
def name_textadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_textadr, dtype=np.int, count=(self.ntext*1)), (self.ntext, 1, ))
arr.setflags(write=False)
return arr
@name_textadr.setter
def name_textadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_textadr, val_ptr, self.ntext*1 * sizeof(c_int))
@property
def name_tupleadr(self):
arr = np.reshape(np.fromiter(self._wrapped.contents.name_tupleadr, dtype=np.int, count=(self.ntuple*1)), (self.ntuple, 1, ))
arr.setflags(write=False)
return arr
@name_tupleadr.setter
def name_tupleadr(self, value):
val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_int))
memmove(self._wrapped.contents.name_tupleadr, val_ptr, self.ntuple*1 * sizeof(c_int))
@property
def names(self):
return self._wrapped.contents.names
| mit | 4,527,838,158,645,334,000 | 35.583167 | 187 | 0.619699 | false | 3.239554 | false | false | false |
ismaelpuerto/i3pystatus | i3pystatus/core/color.py | 22 | 1863 | from colour import Color
class ColorRangeModule(object):
"""
Class to dynamically generate and select colors.
Requires the PyPI package `colour`
"""
start_color = "#00FF00"
end_color = 'red'
@staticmethod
def get_hex_color_range(start_color, end_color, quantity):
"""
Generates a list of quantity Hex colors from start_color to end_color.
:param start_color: Hex or plain English color for start of range
:param end_color: Hex or plain English color for end of range
:param quantity: Number of colours to return
:return: A list of Hex color values
"""
raw_colors = [c.hex for c in list(Color(start_color).range_to(Color(end_color), quantity))]
colors = []
for color in raw_colors:
# i3bar expects the full Hex value but for some colors the colour
# module only returns partial values. So we need to convert these colors to the full
# Hex value.
if len(color) == 4:
fixed_color = "#"
for c in color[1:]:
fixed_color += c * 2
colors.append(fixed_color)
else:
colors.append(color)
return colors
def get_gradient(self, value, colors, upper_limit=100):
"""
        Map a value to a color from the given color range
        :param value: Value to map (e.g. a percentage or sensor reading)
        :param colors: List of Hex colors, e.g. from get_hex_color_range()
        :param upper_limit: Value corresponding to the end of the color range
        :return: A Hex color code
"""
index = int(self.percentage(value, upper_limit))
if index >= len(colors):
return colors[-1]
elif index < 0:
return colors[0]
else:
return colors[index]
@staticmethod
def percentage(part, whole):
"""
Calculate percentage
"""
if whole == 0:
return 0
return 100 * float(part) / float(whole)
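

# Illustrative usage sketch (added for this write-up, not part of i3pystatus):
# build a 10-step green-to-red scale with get_hex_color_range() and map a few
# sample readings onto it with get_gradient().  The sample values are assumptions.
if __name__ == "__main__":
    module = ColorRangeModule()
    scale = module.get_hex_color_range(ColorRangeModule.start_color,
                                       ColorRangeModule.end_color, 10)
    for reading in (5, 50, 95):
        # get_gradient() clamps out-of-range indices to the ends of the scale
        print(reading, module.get_gradient(reading, scale))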
| mit | 5,067,627,435,876,962,000 | 29.540984 | 99 | 0.559313 | false | 4.425178 | false | false | false |
nightmarebadger/tutorials-python-basic | basic/classes/class_inheritance.py | 1 | 2060 | # -*- coding: utf-8 -*-
"""
An example of class inheritance.
"""
class Enemy(object):
"""A stupid enemy that doesn't know how to attack, but knows how to die.
>>> stupid = Enemy(10)
Let's hit him and see if he dies
>>> stupid.take_damage(5)
>>> stupid.alive
True
Nope, not dead yet ... let's try again!
>>> stupid.take_damage(5)
>>> stupid.alive
False
Woohoo, down you go stupid enemy!
"""
def __init__(self, hp):
self.hp = hp
self.alive = True
def take_damage(self, dmg):
"""Take some damage and check your HP for death."""
self.hp -= dmg
self.check_hp()
def die(self):
"""Function called when the enemy dies."""
self.alive = False
def check_hp(self):
"""If HP is too low, die."""
if self.hp <= 0:
self.die()
class Shaman(Enemy):
"""A smarter enemy - can do everything Enemy can, but can also heal
himself.
>>> shaman = Shaman(12)
Let's hit him and check if he was damaged
>>> shaman.take_damage(5)
>>> shaman.alive
True
>>> shaman.hp
7
Nope, not dead yet ... let's try again!
>>> shaman.take_damage(5)
>>> shaman.alive
True
>>> shaman.hp
2
Oops, better heal yourself fast shaman!
>>> shaman.heal(20)
>>> shaman.hp
22
Wow, that was a strong heal ... better bring out the big guns!
>>> shaman.take_damage(100)
>>> shaman.hp
-78
>>> shaman.alive
False
Wait ... what are you trying to do?
>>> shaman.heal(100)
>>> shaman.hp
-78
>>> shaman.alive
False
Silly shaman, you can't heal yourself if you're already dead ...
"""
def __init__(self, hp):
"""Call the __init__ from our superclass."""
super(Shaman, self).__init__(hp)
def heal(self, hp):
"""Heal himself. Can only do that if he is alive."""
if self.alive:
self.hp += hp
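

# Illustrative addition (not part of the original tutorial): overriding an
# inherited method entirely, instead of extending it with super() as Shaman
# does with __init__.
class Golem(Enemy):
    """A tough enemy that ignores half of all incoming damage.

    >>> golem = Golem(10)
    >>> golem.take_damage(10)
    >>> golem.hp
    5
    >>> golem.alive
    True
    """

    def take_damage(self, dmg):
        """Halve the damage, then reuse the inherited death check."""
        super(Golem, self).take_damage(dmg // 2)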
if __name__ == "__main__":
import doctest
doctest.testmod()
| mit | 5,199,249,915,120,525,000 | 17.230088 | 76 | 0.542233 | false | 3.280255 | false | false | false |
alexandrem/ansible-openstack-config-gen | config_parser.py | 2 | 5526 | from os.path import basename, splitext
from datetime import datetime
from collections import OrderedDict
import re
import yaml
from oslo_config import iniparser
VERSION = "0.6.0"
class OSConfigParser(iniparser.BaseParser):
comment_called = False
values = None
section = ''
comments = []
commented = False
def __init__(self):
self.values = OrderedDict()
def assignment(self, key, value):
self.values.setdefault(self.section, {'comments': [], 'entries': {}})
self.values[self.section]['entries'][key] = {
'value': value,
'comments': self.comments,
'commented': self.commented
}
self.comments = []
self.commented = False
def new_section(self, section):
self.section = section
self.values[self.section] = {
'comments': self.comments,
'entries': OrderedDict()
}
self.comments = []
def comment(self, comment):
if len(comment) > 0 and comment[0].isalpha() and '=' in comment:
self.commented = True
self.parse([comment])
self.comments = []
else:
if False and ' = ' in comment:
self.commented = True
try:
self.parse([comment[1:]])
self.comments = []
return
except:
pass
self.comments.append(comment.lstrip())
def parse(self, lineiter):
key = None
value = []
for line in lineiter:
self.lineno += 1
line = line.rstrip()
if not line:
# Blank line, ends multi-line values
if key:
key, value = self._assignment(key, value)
continue
elif line.startswith((' ', '\t')):
# Continuation of previous assignment
if key is None:
self.error_unexpected_continuation(line)
else:
value.append(line.lstrip())
continue
if key:
# Flush previous assignment, if any
key, value = self._assignment(key, value)
if line.startswith('['):
# Section start
section = self._get_section(line)
if section:
self.new_section(section)
elif line.startswith(('#', ';')):
self.comment(line[1:])
else:
key, value = self._split_key_value(line)
if not key:
return self.error_empty_key(line)
if key:
# Flush previous assignment, if any
self._assignment(key, value)
def show_header(fpath, namespace, prefix, desc='', yaml=True):
date = datetime.strftime(datetime.today(), "%Y-%m-%d")
print "#"
print "# AUTOMATICALLY GENERATED ON {0}".format(date)
print "# ansible-openstack-config-gen version: {0}".format(VERSION)
print "#"
if desc:
print "# {0}".format(desc)
print "# file: {0}".format(basename(fpath))
print "# namespace: {0}".format(namespace)
print "# prefix: {0}".format(prefix)
print "#"
if yaml:
print "---"
else:
print ""
def print_comments(comments, newline=0):
for cmt in comments:
print '# {0}'.format(cmt)
for x in range(newline):
print "\n"
def var_namespace(fpath, name):
ns = splitext(basename(fpath.lower()).replace('-', '_'))[0]
if not ns.startswith(name):
ns = "{0}_{1}".format(name, ns)
return ns
def infer_type(comments):
text = ' '.join(comments)
if '(multi valued)' in text:
return 'multi'
if '(list value)' in text:
return 'list'
if '(integer value)' in text:
return 'int'
if '(string value)' in text:
return 'str'
if '(boolean value)' in text:
return 'bool'
return None
def format_var_name(name):
name = name.replace('-', '_').lower()
return re.sub(r'[^a-zA-Z0-9_]', '', name)
def value_to_yaml(entry):
value_type = infer_type(entry['comments'])
def convert_to_none(val, keep_string=True):
if value_type == 'int':
val = None
elif value_type == 'multi':
val = None
elif value_type == 'bool':
val = None
elif value_type == 'list':
val = []
elif value_type != 'str' or not keep_string:
val = ''
return val
if len(entry['value']) == 1:
val = entry['value'][0]
if val.startswith('<') and val.endswith('>'):
val = convert_to_none(val, keep_string=False)
else:
try:
ori_val = val
val = yaml.load(val)
if val is None:
val = convert_to_none(val, keep_string=False)
elif val == 'None':
val = convert_to_none(val)
else:
if value_type == 'str' and type(val) is dict:
#print "FUCK PREVENTION: use scalar instead of nested dict"
val = ori_val
elif value_type == 'str' and type(val) is bool:
val = ori_val
except yaml.scanner.ScannerError:
pass
return val
else:
raise Exception("Cannot convert multiple values %s" % values)
| mit | -3,406,857,260,455,001,600 | 27.484536 | 83 | 0.503257 | false | 4.23773 | false | false | false |
Xicnet/burnstation | daemon/burnstation_daemon.py | 1 | 6793 | #!/usr/bin/python
#
# burnstation daemon - TCP control server for track playback, volume and disc/USB burning
# by rama@r23.cc
MODULE = 'daemon'
import SocketServer
import sys, os, os.path
sys.path.append(os.getcwd())
sys.path.append("..")
from LoadConfig import config
from ErrorsHandler import *
from gst_player import OggPlayer
#from mpd_player import OggPlayer
PORT = 4096
#-----------------------------------------------------------------
# server
#-----------------------------------------------------------------
p = OggPlayer()
class TCPRequestHandler(SocketServer.BaseRequestHandler ):
global p
#--------------------------------------------------------------------
def setup(self):
self.player = p
#logger.info( str(self.client_address), 'connected!' )
logger.info( 'Client connected!' )
welcome = 'OK Welcome to burnstation server.'
#self.request.send(welcome + ' Hi '+ str(self.client_address) + '!\n')
#self.request.send('player status: %s\n' % self.player.status)
self.QUIT = False
#--------------------------------------------------------------------
def handle(self):
while 1:
data = self.request.recv(10240)
logger.info( 'OK Got command: ' + data.strip() )
if data.strip() == 'QUIT':
logger.info( 'quitting..' )
return
else:
if ( data[:5] == 'PLAY ' ):
file = data[5:]
#self.request.send("Playing file: %s" % file)
try:
self.player.AddToPlaylist(file)
self.request.close()
self.player.Play()
except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e))
#self.request.send('player status: %s\n' % self.player.status)
return
elif ( data[:5] == 'STOP ' ):
self.player.Stop()
return
elif ( data[:5] == 'SEEK ' ):
position = data[5:]
try: self.player.Seek(int(position))
except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e))
#self.request.send('player status: %s\n' % self.player.status)
return
elif ( data[:5] == 'VOLU ' ):
level = float(data[5:])
try: self.player.SetVolume(level)
except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e))
return
elif ( data[:5] == 'BURN_' ):
try:
#mode = data[5:6]
#tracks = data[7:-1].split(':')
commands = data.split("|||")
print "COMMANDS : ", commands
mode = commands[0].split("_")[1]
print "MODE : ", mode
tracks = commands[1].split(":")
if mode == 'A': mode = 'AUDIO'
elif mode == 'D': mode = 'DATA'
elif mode == 'U': mode = 'USB'
#logger.debug(mode)
#logger.debug(tracks)
# FIXME : ugly hardcode
home = "/usr/share/burnstation"
cmd = home + '/burn.py'
args = [cmd, mode]
for track in tracks:
if track != '':
args.append(track)
logger.debug("-------------------------------")
logger.debug("TRACKS")
logger.debug(tracks)
logger.debug("-------------------------------")
logger.debug(args)
logger.debug("-------------------------------")
logger.debug(args)
logger.debug("-------------------------------")
try:
logger.debug("Spawning burn script..")
b = os.spawnve(os.P_NOWAIT, cmd, args, os.environ)
except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e))
return
except Exception, e:
logger.error("EXCEPTION: %s" % str(e))
else:
#self.request.send('ERR command not found: %s\n' % data)
logger.error('ERR command not found: %s\n' % data)
return
#--------------------------------------------------------------------
def finish(self):
#print self.client_address, 'disconnected!'
logger.info( 'Client ready for disconnection!' )
try: self.request.send('OK bye ' + str(self.client_address) + '\n')
except Exception, e: logger.error("burnstation daemon EXCEPTION: " + str(e))
logger.info( 'Disconnecting' )
#self.QUIT = True
return
#--------------------------------------------------------------------
def stop_daemon():
stop_daemon_cmd = "kill -9 `ps ax | grep python.*burnstation_daemon | grep -v grep | awk '{print $1}'`"
os.system(stop_daemon_cmd)
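
#--------------------------------------------------------------------
# Illustrative client sketch (not part of the original daemon): any front-end
# can drive the command protocol handled above by sending plain text over TCP.
# The track path below is a made-up example.
def send_command(command, host='127.0.0.1', port=PORT):
    import socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    sock.send(command)
    sock.close()
#
# Example calls:
# send_command('PLAY /music/example.ogg')
# send_command('VOLU 0.5')
# send_command('STOP ')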
#--------------------------------------------------------------------
if __name__ == '__main__':
print 1
if len(sys.argv) < 2:
print "Usage: burnstation_daemon.py [start|stop]"
sys.exit(0)
if sys.argv[1] == 'stop':
# stop the daemon
stop_daemon()
sys.exit(0)
print 2
try:
pid = os.fork()
print "start"
if pid > 0:
print "daemon PID is: " + str(pid)
sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
try:
# server host is a tuple ('host', PORT)
tcpserver = SocketServer.ThreadingTCPServer(('127.0.0.1', PORT), TCPRequestHandler)
tcpserver.allow_reuse_address = True
tcpserver.serve_forever()
except Exception, e:
logger.error(MODULE+" EXCEPTION: " + str(e))
logger.error(MODULE+" NOT starting")
"""
if __name__ == '__main__':
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
tcpserver = SocketServer.ThreadingTCPServer(('127.0.0.1', PORT), TCPRequestHandler)
tcpserver.allow_reuse_address = True
tcpserver.serve_forever()
"""
| gpl-3.0 | -3,496,441,776,671,483,000 | 36.530387 | 107 | 0.430001 | false | 4.451507 | false | false | false |
qxf2/qxf2-page-object-model | utils/BrowserStack_Library.py | 1 | 3679 | """
First version of a library to interact with BrowserStack's artifacts.
For now, this is useful for:
a) Obtaining the session URL
b) Obtaining URLs of screenshots
To do:
a) Handle expired sessions better
"""
import os,requests
from conf import remote_credentials as remote_credentials
class BrowserStack_Library():
"BrowserStack library to interact with BrowserStack artifacts"
def __init__(self):
"Constructor for the BrowserStack library"
self.browserstack_url = "https://www.browserstack.com/automate/"
self.auth = self.get_auth()
def get_auth(self):
"Set up the auth object for the Requests library"
USERNAME = remote_credentials.USERNAME
PASSWORD = remote_credentials.ACCESS_KEY
auth = (USERNAME,PASSWORD)
return auth
def get_build_id(self):
"Get the build ID"
self.build_url = self.browserstack_url + "builds.json"
builds = requests.get(self.build_url, auth=self.auth).json()
build_id = builds[0]['automation_build']['hashed_id']
return build_id
def get_sessions(self):
"Get a JSON object with all the sessions"
build_id = self.get_build_id()
sessions= requests.get(self.browserstack_url + 'builds/%s/sessions.json'%build_id, auth=self.auth).json()
return sessions
def get_active_session_id(self):
"Return the session ID of the first active session"
session_id = None
sessions = self.get_sessions()
for session in sessions:
#Get session id of the first session with status = running
if session['automation_session']['status']=='running':
session_id = session['automation_session']['hashed_id']
break
return session_id
def get_session_url(self):
"Get the session URL"
build_id = self.get_build_id()
session_id = self.get_active_session_id()
session_url = self.browserstack_url + 'builds/%s/sessions/%s'%(build_id,session_id)
return session_url
def get_session_logs(self):
"Return the session log in text format"
build_id = self.get_build_id()
session_id = self.get_active_session_id()
session_log = requests.get(self.browserstack_url + 'builds/%s/sessions/%s/logs'%(build_id,session_id),auth=self.auth).text
return session_log
def get_latest_screenshot_url(self):
"Get the URL of the latest screenshot"
session_log = self.get_session_logs()
#Process the text to locate the URL of the last screenshot
#Extract the https://s2.amazonaws from example lines:
#2016-2-9 4:42:39:52 RESPONSE {"state":"success","sessionId":"f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35","hCode":29018101,"value":"https://s3.amazonaws.com/testautomation/f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35/screenshot-selenium-b14d4ec62a.png","class":"org.openqa.selenium.remote.Response","status":0}
#[2016-2-9 4:42:45:892] REQUEST [[2016-2-9 4:42:45:892]] GET /session/f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35/title {}
#2016-2-9 4:42:45:957 RESPONSE {"state":"success","sessionId":"f77e1de6e4f42a72e6a6ecfd80ed07b95036ca35","hCode":19687124,"value":"New Member Registration & Signup - Chess.com","class":"org.openqa.selenium.remote.Response","status":0}
screenshot_request = session_log.split('screenshot {}')[-1]
response_result = screenshot_request.split('REQUEST')[0]
image_url = response_result.split('https://')[-1]
image_url = image_url.split('.png')[0]
screenshot_url = 'https://' + image_url + '.png'
return screenshot_url
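
# Illustrative usage sketch (not part of the original library); it assumes
# valid BrowserStack credentials in conf/remote_credentials.py and at least
# one currently running Automate session.
if __name__ == '__main__':
    browserstack_obj = BrowserStack_Library()
    print("Session URL: %s" % browserstack_obj.get_session_url())
    print("Latest screenshot: %s" % browserstack_obj.get_latest_screenshot_url())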
| mit | 150,663,159,781,676,300 | 36.927835 | 312 | 0.661049 | false | 3.523946 | false | false | false |
phenoxim/nova | placement-api-ref/source/conf.py | 1 | 3025 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# placement-api-ref documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from nova.version import version_info
extensions = [
'openstackdocstheme',
'os_api_ref',
]
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Placement API Reference'
copyright = u'2010-present, OpenStack Foundation'
# openstackdocstheme options
repository_name = 'openstack/nova'
bug_project = 'nova'
bug_tag = 'placement-api-ref'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The short X.Y version.
version = version_info.version_string()
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"sidebar_mode": "toc",
}
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# -- Options for LaTeX output -------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'Placement.tex', u'OpenStack Placement API Documentation',
u'OpenStack Foundation', 'manual'),
]
| apache-2.0 | -3,952,762,799,330,654,000 | 32.611111 | 79 | 0.705124 | false | 4.006623 | false | false | false |
dereulenspiegel/spotimc | resources/libs/spotimcgui/views/playlists/detail.py | 1 | 8685 | '''
Copyright 2011 Mikel Azkolain
This file is part of Spotimc.
Spotimc is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Spotimc is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Spotimc. If not, see <http://www.gnu.org/licenses/>.
'''
import xbmcgui
from spotimcgui.views import BaseListContainerView, iif
import loaders
from spotify import link, track
from spotimcgui.views.album import AlbumTracksView
from spotimcgui.views.artists import open_artistbrowse_albums
from spotimcgui.settings import SettingsManager
class PlaylistDetailView(BaseListContainerView):
container_id = 1800
list_id = 1801
BrowseArtistButton = 5811
BrowseAlbumButton = 5812
context_toggle_star = 5813
__loader = None
__playlist = None
def __init__(self, session, playlist, playlist_manager):
self.__playlist = playlist
self.__loader = loaders.FullPlaylistLoader(
session, playlist, playlist_manager
)
def _set_loader(self, loader):
self.__loader = loader
def _set_playlist(self, playlist):
self.__playlist = playlist
def _browse_artist(self, view_manager):
item = self.get_list(view_manager).getSelectedItem()
pos = int(item.getProperty('ListIndex'))
track = self.__loader.get_track(pos)
artist_list = [artist for artist in track.artists()]
open_artistbrowse_albums(view_manager, artist_list)
def click(self, view_manager, control_id):
session = view_manager.get_var('session')
if control_id == PlaylistDetailView.list_id:
item = self.get_list(view_manager).getSelectedItem()
pos = int(item.getProperty('ListIndex'))
print 'clicked pos: %s' % pos
playlist_manager = view_manager.get_var('playlist_manager')
playlist_manager.play(self.__loader.get_tracks(), session, pos)
elif control_id == PlaylistDetailView.BrowseArtistButton:
self._browse_artist(view_manager)
elif control_id == PlaylistDetailView.BrowseAlbumButton:
item = self.get_list(view_manager).getSelectedItem()
pos = int(item.getProperty('ListIndex'))
album = self.__loader.get_track(pos).album()
v = AlbumTracksView(view_manager.get_var('session'), album)
view_manager.add_view(v)
elif control_id == PlaylistDetailView.context_toggle_star:
item = self.get_list(view_manager).getSelectedItem()
pos = int(item.getProperty("ListIndex"))
if pos is not None:
session = view_manager.get_var('session')
current_track = self.__loader.get_track(pos)
if item.getProperty('IsStarred') == 'true':
item.setProperty('IsStarred', 'false')
track.set_starred(session, [current_track], False)
else:
item.setProperty('IsStarred', 'true')
track.set_starred(session, [current_track], True)
def get_container(self, view_manager):
return view_manager.get_window().getControl(PlaylistDetailView.container_id)
def get_list(self, view_manager):
return view_manager.get_window().getControl(PlaylistDetailView.list_id)
def _get_playlist_length_str(self):
total_duration = 0
for track in self.__playlist.tracks():
total_duration += track.duration() / 1000
#Now the string ranges
one_minute = 60
one_hour = 3600
one_day = 3600 * 24
if total_duration > one_day:
num_days = int(round(total_duration / one_day))
if num_days == 1:
return 'one day'
else:
return '%d days' % num_days
elif total_duration > one_hour:
num_hours = int(round(total_duration / one_hour))
if num_hours == 1:
return 'one hour'
else:
return '%d hours' % num_hours
else:
num_minutes = int(round(total_duration / one_minute))
if num_minutes == 1:
return 'one minute'
else:
return '%d minutes' % num_minutes
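    # Worked example (illustrative, not in the original code): a playlist
    # totalling 7500 seconds falls into the "hour" branch above; with the
    # integer maths used here 7500 / 3600 gives 2, so the string returned
    # is '2 hours'.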
def _set_playlist_properties(self, view_manager):
window = view_manager.get_window()
#Playlist name
window.setProperty("PlaylistDetailName", self.__loader.get_name())
#Owner info
session = view_manager.get_var('session')
current_username = session.user().canonical_name()
playlist_username = self.__playlist.owner().canonical_name()
show_owner = current_username != playlist_username
window.setProperty("PlaylistDetailShowOwner", iif(show_owner, "true", "false"))
if show_owner:
window.setProperty("PlaylistDetailOwner", str(playlist_username))
#Collaboratie status
is_collaborative_str = iif(self.__playlist.is_collaborative(), "true", "false")
window.setProperty("PlaylistDetailCollaborative", is_collaborative_str)
#Length data
window.setProperty("PlaylistDetailNumTracks", str(self.__playlist.num_tracks()))
window.setProperty("PlaylistDetailDuration", self._get_playlist_length_str())
#Subscribers
window.setProperty("PlaylistDetailNumSubscribers", str(self.__playlist.num_subscribers()))
def _set_playlist_image(self, view_manager, thumbnails):
if len(thumbnails) > 0:
window = view_manager.get_window()
#Set cover layout info
cover_layout_str = iif(len(thumbnails) < 4, "one", "four")
window.setProperty("PlaylistDetailCoverLayout", cover_layout_str)
#Now loop to set all the images
for idx, thumb_item in enumerate(thumbnails):
item_num = idx + 1
is_remote = thumb_item.startswith("http://")
is_remote_str = iif(is_remote, "true", "false")
window.setProperty("PlaylistDetailCoverItem%d" % item_num, thumb_item)
window.setProperty("PlaylistDetailCoverItem%dIsRemote" % item_num, is_remote_str)
def render(self, view_manager):
if self.__loader.is_loaded():
session = view_manager.get_var('session')
pm = view_manager.get_var('playlist_manager')
list_obj = self.get_list(view_manager)
sm = SettingsManager()
#Set the thumbnails
self._set_playlist_image(view_manager, self.__loader.get_thumbnails())
#And the properties
self._set_playlist_properties(view_manager)
#Clear the list
list_obj.reset()
#Draw the items on the list
for list_index, track_obj in enumerate(self.__loader.get_tracks()):
show_track = (
track_obj.is_loaded() and
track_obj.error() == 0 and
(
track_obj.get_availability(session) == track.TrackAvailability.Available or
not sm.get_audio_hide_unplayable()
)
)
if show_track:
url, info = pm.create_track_info(track_obj, session, list_index)
list_obj.addItem(info)
return True
class SpecialPlaylistDetailView(PlaylistDetailView):
def __init__(self, session, playlist, playlist_manager, name, thumbnails):
self._set_playlist(playlist)
loader = loaders.SpecialPlaylistLoader(
session, playlist, playlist_manager, name, thumbnails
)
self._set_loader(loader)
| gpl-3.0 | 8,551,286,709,862,739,000 | 35.435345 | 99 | 0.572021 | false | 4.375315 | false | false | false |
tovrstra/horton | horton/periodic.py | 4 | 9840 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''Periodic table of elements
This module contains an object ``periodic`` that can be used as a Pythonic
periodic table. It can be used as follows::
>>> from horton import periodic
>>> periodic['si'].number
14
>>> periodic['He'].number
2
>>> periodic['h'].symbol
'H'
>>> periodic[3].symbol
'Li'
>>> periodic['5'].symbol
'B'
'''
from horton.context import context
from horton.units import angstrom, amu
__all__ = ['periodic', 'Element', 'Periodic']
class Element(object):
'''Represents an element from the periodic table.
The following attributes are supported for all elements:
number
The atomic number.
symbol
A string with the symbol of the element.
name
The full element name.
group
The group of the element (not for actinides and lanthanides).
period
The row of the periodic system.
The following attributes are present for some elements. When a parameter
is not known for a given element, the attribute is set to None.
cov_radius_cordero
Covalent radius. B. Cordero, V. Gomez, A. E. Platero-Prats, M.
Reves, J. Echeverria, E. Cremades, F. Barragan, and S. Alvarez,
Dalton Trans. pp. 2832--2838 (2008), URL
http://dx.doi.org/10.1039/b801115j
cov_radius_bragg
Covalent radius. W. L. Bragg, Phil. Mag. 40, 169 (1920), URL
http://dx.doi.org/10.1080/14786440808636111
cov_radius_slater
Covalent radius. J. C. Slater, J. Chem. Phys. 41, 3199 (1964), URL
http://dx.doi.org/10.1063/1.1725697
vdw_radius_bondi
van der Waals radius. A. Bondi, J. Phys. Chem. 68, 441 (1964), URL
http://dx.doi.org/10.1021/j100785a001
vdw_radius_truhlar
van der Waals radius. M. Mantina A. C. Chamberlin R. Valero C. J.
Cramer D. G. Truhlar J. Phys. Chem. A 113 5806 (2009), URL
http://dx.doi.org/10.1021/jp8111556
vdw_radius_rt
van der Waals radius. R. S. Rowland and R. Taylor, J. Phys. Chem.
100, 7384 (1996), URL http://dx.doi.org/10.1021/jp953141+
vdw_radius_batsanov
van der Waals radius. S. S. Batsanov Inorganic Materials 37 871
(2001), URL http://dx.doi.org/10.1023/a%3a1011625728803
vdw_radius_dreiding
van der Waals radius. Stephen L. Mayo, Barry D. Olafson, and William
A. Goddard III J. Phys. Chem. 94 8897 (1990), URL
http://dx.doi.org/10.1021/j100389a010
vdw_radius_uff
van der Waals radius. A. K. Rappi, C. J. Casewit, K. S. Colwell, W.
A. Goddard III, and W. M. Skid J. Am. Chem. Soc. 114 10024 (1992),
URL http://dx.doi.org/10.1021/ja00051a040
vdw_radius_mm3
van der Waals radius. N. L. Allinger, X. Zhou, and J. Bergsma,
Journal of Molecular Structure: THEOCHEM 312, 69 (1994),
http://dx.doi.org/10.1016/s0166-1280(09)80008-0
wc_radius
Waber-Cromer radius of the outermost orbital maximum. J. T. Waber
and D. T. Cromer, J. Chem. Phys. 42, 4116 (1965), URL
http://dx.doi.org/10.1063/1.1695904
cr_radius
Clementi-Raimondi radius. E. Clementi, D. L. Raimondi, W. P.
Reinhardt, J. Chem. Phys. 47, 1300 (1967), URL
http://dx.doi.org/10.1063/1.1712084
pold_crc
Isolated atom dipole polarizability. CRC Handbook of Chemistry and
Physics (CRC, Boca Raton, FL, 2003). If multiple values were present
in the CRC book, the value used in Erin's postg code is taken.
pold_chu
Isolated atom dipole polarizability. X. Chu & A. Dalgarno, J. Chem.
Phys., 121(9), 4083--4088 (2004), URL
http://dx.doi.org/10.1063/1.1779576 Theoretical value for hydrogen
from this paper: A.D. Buckingham, K.L. Clarke; Chem. Phys. Lett.
57(3), 321--325 (1978), URL
http://dx.doi.org/10.1016/0009-2614(78)85517-1
c6_chu
Isolated atom C_6 dispersion coefficient. X. Chu & A. Dalgarno, J. Chem.
Phys., 121(9), 4083--4088 (2004), URL
http://dx.doi.org/10.1063/1.1779576 Theoretical value for hydrogen
from this paper: K. T. Tang, J. M. Norbeck and P. R. Certain; J.
        Chem. Phys. 64, 3063 (1976), URL
http://dx.doi.org/10.1063/1.432569
mass
        The IUPAC atomic masses (weights) of 2013.
T.B. Coplen, W.A. Brand, J. Meija, M. Gröning, N.E. Holden, M.
Berglund, P. De Bièvre, R.D. Loss, T. Prohaska, and T. Walczyk.
http://ciaaw.org, http://www.ciaaw.org/pubs/TSAW2013_xls.xls,
When ranges are provided, the middle of the range is used.
The following attributes are derived from the data given above:
cov_radius:
| equals cov_radius_cordero
vdw_radius:
| vdw_radius_truhlar if present
| else vdw_radius_bondi if present
| else vdw_radius_batsanov if present
| else vdw_radius_mm3 if present
| else None
becke_radius:
| cov_radius_slater if present
| else cov_radius_cordero if present
| else None
pold:
| pold_crc
c6:
| c6_chu
'''
def __init__(self, number=None, symbol=None, **kwargs):
self.number = number
self.symbol = symbol
for name, value in kwargs.iteritems():
setattr(self, name, value)
self.cov_radius = self.cov_radius_cordero
if self.vdw_radius_truhlar is not None:
self.vdw_radius = self.vdw_radius_truhlar
elif self.vdw_radius_bondi is not None:
self.vdw_radius = self.vdw_radius_bondi
elif self.vdw_radius_batsanov is not None:
self.vdw_radius = self.vdw_radius_batsanov
elif self.vdw_radius_mm3 is not None:
self.vdw_radius = self.vdw_radius_mm3
else:
self.vdw_radius = None
if self.cov_radius_slater is not None:
self.becke_radius = self.cov_radius_slater
elif self.cov_radius_cordero is not None:
self.becke_radius = self.cov_radius_cordero
else:
self.becke_radius = None
self.pold = self.pold_crc
self.c6 = self.c6_chu
class Periodic(object):
'''A periodic table data structure.'''
def __init__(self, elements):
'''**Arguments:**
elements
A list of :class:`Element` instances.
'''
self.elements = elements
self._lookup = {}
for element in elements:
self._lookup[element.number] = element
self._lookup[element.symbol.lower()] = element
def __getitem__(self, index):
'''Get an element from the table based on a flexible index.
**Argument:**
index
This can be either an integer atomic number, a string with the
elemental symbol (any case), or a string with the atomic number.
**Returns:** the corresponding :class:`Element` instance
'''
result = self._lookup.get(index)
if result is None and isinstance(index, basestring):
index = index.strip()
result = self._lookup.get(index.lower())
if result is None and index.isdigit():
result = self._lookup.get(int(index))
if result is None:
raise KeyError('Could not find element %s.' % index)
return result
def load_periodic():
import csv
convertor_types = {
'int': (lambda s: int(s)),
'float': (lambda s : float(s)),
'au': (lambda s : float(s)), # just for clarity, atomic units
'str': (lambda s: s.strip()),
'angstrom': (lambda s: float(s)*angstrom),
'2angstrom': (lambda s: float(s)*angstrom/2),
'angstrom**3': (lambda s: float(s)*angstrom**3),
'amu': (lambda s: float(s)*amu),
}
with open(context.get_fn('elements.csv'),'r') as f:
r = csv.reader(f)
# go to the actual data
for row in r:
if len(row[1]) > 0:
break
# parse the first two header rows
names = row
convertors = [convertor_types[key] for key in r.next()]
elements = []
for row in r:
if len(row) == 0:
break
kwargs = {}
for i in xrange(len(row)):
cell = row[i]
if len(cell) > 0:
kwargs[names[i]] = convertors[i](cell)
else:
kwargs[names[i]] = None
elements.append(Element(**kwargs))
return Periodic(elements)
periodic = load_periodic()
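# Illustrative usage sketch (not part of the original module); it only exercises
# the lookup behaviour and the derived-attribute fallbacks documented above.
if __name__ == '__main__':
    si = periodic['si']
    print si.number, si.symbol          # 14 Si (case-insensitive symbol lookup)
    print periodic[14].cov_radius       # same element, looked up by atomic number
    # becke_radius falls back from cov_radius_slater to cov_radius_cordero
    print periodic['14'].becke_radius is not None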
| gpl-3.0 | -8,768,901,301,395,788,000 | 33.640845 | 84 | 0.575219 | false | 3.405331 | false | false | false |
BenoitDamota/mempamal | mempamal/arguments.py | 1 | 2611 | # Author: Benoit Da Mota <damota.benoit@gmail.com>
#
# License: BSD 3 clause
"""
Build arguments parser for the scripts (mapper, reducers and command builder).
"""
import argparse
def get_map_argparser():
"""Build command line arguments parser for a mapper.
Arguments parser compatible with the commands builder workflows.
"""
parser = argparse.ArgumentParser()
parser.add_argument("crossval",
help="JSON file to configure cross validation scheme")
parser.add_argument("method",
help="JSON file to configure the method")
parser.add_argument("dataset",
help="Joblib file with data and folds")
parser.add_argument("out",
help="Filename to output the results")
parser.add_argument("outer", type=int,
help="Outer CV Id")
parser.add_argument("--inner", type=int,
help="Inner CV Id")
# verbose mode
parser.add_argument("-v", "--verbose", help="verbose mode",
action="store_true")
return parser
def get_ired_argparser():
"""Build command line arguments parser for an inner reducer.
Arguments parser compatible with the commands builder workflows.
"""
parser = argparse.ArgumentParser()
parser.add_argument("crossval",
help="JSON file to configure cross validation scheme")
parser.add_argument("method",
help="JSON file to configure the method")
parser.add_argument("dataset",
help="Joblib file with data and folds")
parser.add_argument("out",
help="Filename to output the results")
parser.add_argument("in",
help="Filename template for input files")
parser.add_argument("outer", type=int,
help="Outer CV Id")
# verbose mode
parser.add_argument("-v", "--verbose", help="verbose mode",
action="store_true")
return parser
def get_ored_argparser():
"""Build command line arguments parser for an outer reducer.
Arguments parser compatible with the commands builder workflows.
"""
parser = argparse.ArgumentParser()
parser.add_argument("out",
help="Filename to output the results")
parser.add_argument("in",
help="Filename template for input files")
# verbose mode
parser.add_argument("-v", "--verbose", help="verbose mode",
action="store_true")
return parser
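# Illustrative sketch, not part of the original package: the input-file template
# is declared as the positional argument "in", which is a Python keyword, so the
# parsed value has to be fetched via vars()/getattr rather than ``args.in``.
if __name__ == '__main__':
    args = vars(get_ored_argparser().parse_args())
    out_file, in_template = args['out'], args['in']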
| bsd-3-clause | 886,761,547,384,171,100 | 32.474359 | 78 | 0.592876 | false | 4.712996 | false | false | false |
riga/law | examples/workflows/tasks.py | 1 | 4679 | # coding: utf-8
"""
Example showing (local) law workflows.
The actual payload of the tasks is rather trivial.
"""
import os
import time
import random
import six
import luigi
import law
def maybe_wait(func):
"""
Wrapper around run() methods that reads the *slow* flag to decide whether to wait some seconds
    for illustrative purposes. This is very straightforward, so no need for functools.wraps here.
"""
def wrapper(self, *args, **kwargs):
if self.slow:
time.sleep(random.randint(5, 15))
return func(self, *args, **kwargs)
return wrapper
class Task(law.Task):
"""
Base task that provides some convenience methods to create local file and directory targets at
the default data path, as defined in the setup.sh.
"""
slow = luigi.BoolParameter(description="before running, wait between 5 and 15 seconds")
def store_parts(self):
return (self.__class__.__name__,)
def local_path(self, *path):
# WORKFLOWEXAMPLE_DATA_PATH is defined in setup.sh
parts = (os.getenv("WORKFLOWEXAMPLE_DATA_PATH"),) + self.store_parts() + path
return os.path.join(*parts)
def local_target(self, *path):
return law.LocalFileTarget(self.local_path(*path))
class CreateChars(Task, law.LocalWorkflow):
"""
Simple task that has a trivial payload: converting integers into ascii characters. The task is
designed to be a workflow with 26 branches. Each branch creates one character (a-z) and saves
it to a json output file. While branches are numbered continuously from 0 to n-1, the actual
data it processes is defined in the *branch_map*. A task can access this data via
``self.branch_map[self.branch]``, or via ``self.branch_data`` by convenience.
In this example CreateChars is a LocalWorkflow, but in general it can also inherit from multiple
other workflow classes. The code in this task should be completely independent of the actual
*run location*, and law provides the means to do so.
When a branch greater or equal to zero is set, e.g. via ``"--branch 1"``, you instantiate a
single *branch task* rather than the workflow. Branch tasks are always executed locally.
"""
def create_branch_map(self):
# map branch indexes to ascii numbers from 97 to 122 ("a" to "z")
return {i: num for i, num in enumerate(range(97, 122 + 1))}
def output(self):
# it's best practice to encode the branch number into the output target
return self.local_target("output_{}.json".format(self.branch))
@maybe_wait
def run(self):
# the branch data holds the integer number to convert
num = self.branch_data
# actual payload: convert to char
char = chr(num)
# use target formatters (implementing dump and load, based on the file extension)
# to write the output target
output = self.output()
output.dump({"num": num, "char": char})
class CreateAlphabet(Task):
"""
This task requires the CreateChars workflow and extracts the created characters to write the
alphabet into a text file.
"""
def requires(self):
# req() is defined on all tasks and handles the passing of all parameter values that are
# common between the required task and the instance (self)
# note that the workflow is required (branch -1, the default), not the particular branch
# tasks (branches [0, 26))
return CreateChars.req(self)
def output(self):
# output a plain text file
return self.local_target("alphabet.txt")
@maybe_wait
def run(self):
# since we require the workflow and not the branch tasks (see above), self.input() points
# to the output of the workflow, which contains the output of its branches in a target
# collection, stored - of course - in "collection"
inputs = self.input()["collection"].targets
# loop over all targets in the collection, load the json data, and append the character
# to the alphabet
alphabet = ""
for inp in six.itervalues(inputs):
alphabet += inp.load()["char"]
# again, dump the alphabet string into the output file
output = self.output()
output.dump(alphabet + "\n")
# some status message
# publish_message not only prints the message to stdout, but sends it to the scheduler
# where it will become visible in the browser visualization
alphabet = "".join(law.util.colored(c, color="random") for c in alphabet)
self.publish_message("\nbuilt alphabet: {}\n".format(alphabet))
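# Illustrative sketch (an assumption, not part of the original example): besides
# the law CLI, the task chain above can be scheduled programmatically with luigi.
if __name__ == "__main__":
    luigi.build([CreateAlphabet(slow=False)], workers=2, local_scheduler=True)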
| bsd-3-clause | -1,782,150,594,808,893,400 | 35.554688 | 100 | 0.666168 | false | 4.261384 | false | false | false |
thenenadx/forseti-security | google/cloud/security/inventory/pipelines/load_instance_groups_pipeline.py | 1 | 3907 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pipeline to load compute instance groups into Inventory.
This pipeline depends on the LoadProjectsPipeline.
"""
from google.cloud.security.common.gcp_api import errors as api_errors
from google.cloud.security.common.data_access import project_dao as proj_dao
from google.cloud.security.common.util import log_util
from google.cloud.security.common.util import parser
from google.cloud.security.inventory import errors as inventory_errors
from google.cloud.security.inventory.pipelines import base_pipeline
# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc,missing-return-type-doc
# pylint: disable=missing-yield-type-doc
LOGGER = log_util.get_logger(__name__)
class LoadInstanceGroupsPipeline(base_pipeline.BasePipeline):
"""Load compute instance groups for all projects."""
RESOURCE_NAME = 'instance_groups'
def _transform(self, resource_from_api):
"""Create an iterator of instance groups to load into database.
Args:
resource_from_api: A dict of instance groups, keyed by
project id, from GCP API.
Yields:
Iterator of instance group properties in a dict.
"""
for (project_id, instance_groups) in resource_from_api.iteritems():
for instance_group in instance_groups:
yield {'project_id': project_id,
'id': instance_group.get('id'),
'creation_timestamp': parser.format_timestamp(
instance_group.get('creationTimestamp'),
self.MYSQL_DATETIME_FORMAT),
'name': instance_group.get('name'),
'description': instance_group.get('description'),
'named_ports': parser.json_stringify(
instance_group.get('namedPorts', [])),
'network': instance_group.get('network'),
'region': instance_group.get('region'),
'size': self._to_int(instance_group.get('size')),
'subnetwork': instance_group.get('subnetwork'),
'zone': instance_group.get('zone'),
'raw_instance_group':
parser.json_stringify(instance_group)}
def _retrieve(self):
"""Retrieve instance groups from GCP.
Get all the projects in the current snapshot and retrieve the
compute instance groups for each.
Returns:
A dict mapping projects with their instance groups (list):
{project_id: [instance groups]}
"""
projects = proj_dao.ProjectDao().get_projects(self.cycle_timestamp)
igs = {}
for project in projects:
try:
project_igs = self.api_client.get_instance_groups(project.id)
if project_igs:
igs[project.id] = project_igs
except api_errors.ApiExecutionError as e:
LOGGER.error(inventory_errors.LoadDataPipelineError(e))
return igs
def run(self):
"""Run the pipeline."""
igs = self._retrieve()
loadable_igs = self._transform(igs)
self._load(self.RESOURCE_NAME, loadable_igs)
self._get_loaded_count()
| apache-2.0 | 3,827,020,058,999,647,700 | 39.697917 | 77 | 0.626056 | false | 4.460046 | false | false | false |
wadobo/Ftask | ftask/app/board/task_views.py | 2 | 4705 | # Ftask, simple TODO list application
# Copyright (C) 2012 Daniel Garcia <danigm@wadobo.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, absolute_import
from flask import abort
from flask import jsonify
from flask import request
from flask import g
from ..db import get_db, to_json
from ..auth.decorators import authenticated
from .board_views import get_board_by_id
from .board_views import can_view_board
from bson.objectid import ObjectId
@authenticated
@can_view_board
def view_board_tasks(boardid):
c = get_db().tasks
t = c.find({'boardid': boardid}).sort([('order', 1)])
meta = {}
meta['total'] = t.count()
objs = [to_json(i) for i in t]
return jsonify(meta=meta,
objects=objs)
view_board_tasks.path = '/<boardid>/tasks/'
@authenticated
@can_view_board
def view_list_tasks(boardid, listid):
c = get_db().tasks
t = c.find({'listid': listid}).sort([('order', 1)])
meta = {}
meta['total'] = t.count()
objs = [serialize_task(i) for i in t]
return jsonify(meta=meta,
objects=objs)
view_list_tasks.path = '/<boardid>/lists/<listid>/tasks/'
@authenticated
@can_view_board
def new_list_task(boardid, listid):
c = get_db().tasks
description = request.form['description']
order = c.find({'boardid': boardid, 'listid': listid}).count()
t = {
'boardid': boardid,
'listid': listid,
'description': description,
'order': order,
}
c.insert(t)
return jsonify(status="success")
new_list_task.path = '/<boardid>/lists/<listid>/tasks/new/'
new_list_task.methods = ['POST']
@authenticated
@can_view_board
def view_list_task(boardid, listid, taskid):
c = get_db().tasks
t = c.find_one({'boardid': boardid, '_id': ObjectId(taskid)})
if request.method == 'GET':
if not t:
raise abort(404)
return jsonify(serialize_task(t))
elif request.method == 'PUT':
update_task(t, g.user, request.form)
elif request.method == 'DELETE':
delete_task(t, g.user)
return jsonify(status="success")
view_list_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/'
view_list_task.methods = ['GET', 'PUT', 'DELETE']
@authenticated
@can_view_board
def assign_task(boardid, listid, taskid):
c = get_db().tasks
t = c.find_one({'boardid': boardid, 'listid': listid, '_id': ObjectId(taskid)})
if not t:
raise abort(404)
    # don't add the same user twice
user = request.form['user']
assign = t.get('assign', [])
if user in assign:
return jsonify(status="success")
t['assign'] = t.get('assign', []) + [user]
c.save(t)
return jsonify(status="success")
assign_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/assign/'
assign_task.methods = ['POST']
@authenticated
@can_view_board
def unassign_task(boardid, listid, taskid):
c = get_db().tasks
t = c.find_one({'boardid': boardid, 'listid': listid, '_id': ObjectId(taskid)})
if not t:
raise abort(404)
    # drop the user from the assigned list
user = request.form['user']
l = t.get('assign', [])
l.remove(user)
t['assign'] = l
c.save(t)
return jsonify(status="success")
unassign_task.path = '/<boardid>/lists/<listid>/tasks/<taskid>/unassign/'
unassign_task.methods = ['POST']
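# Illustrative note (an assumption, not original Ftask code): every handler above
# carries its routing metadata in the ``path``/``methods`` attributes set right
# after its definition, so a registrar elsewhere in the app can wire them up
# roughly like this (the URL prefix and endpoint names are made up):
#
#     for view in (view_board_tasks, view_list_tasks, new_list_task,
#                  view_list_task, assign_task, unassign_task):
#         app.add_url_rule('/boards' + view.path, view.__name__, view,
#                          methods=getattr(view, 'methods', ['GET']))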
def task_board(t):
c = get_db().boards
return get_board_by_id(t['boardid'])
def task_list(t):
c = get_db().boards
b = get_board_by_id(t['boardid'])
for l in b.get('lists', []):
if l['id'] == t['listid']:
return l
return None
def update_task(task, user, newdata):
for k, v in newdata.items():
if k == "order":
task[k] = int(v)
else:
task[k] = v
get_db().tasks.save(task)
def delete_task(task, user):
get_db().tasks.remove({'_id': task['_id']})
def serialize_task(t):
s = t.get('assign', [])
t['assign'] = [to_json(u, excludes=['password']) for u in get_db().users.find({"username": {"$in": s}})]
serialized = to_json(t)
return serialized
| agpl-3.0 | 2,797,238,628,809,181,000 | 25.885714 | 108 | 0.630606 | false | 3.229238 | false | false | false |
transceptor-technology/trender | trender/block_text.py | 1 | 1588 | '''BlockText, used to parse text with variable etc.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
import re
from .constants import VAR_DOTS
class BlockText:
RE_VAR = re.compile('@([{VAR_DOTS}]+)(!?)'.format(VAR_DOTS=VAR_DOTS),
re.UNICODE)
def __init__(self, text):
'''Initialize template line (or lines).'''
self._need_format = False
self._text = self._compile(text)
def render(self, namespace):
'''Render template lines.
Note: we only need to parse the namespace if we used variables in
this part of the template.
'''
return self._text.format_map(namespace.dictionary) \
if self._need_format else self._text
def _compile(self, text):
        # replace curly braces with double curly so they will be
# escaped when using format.
text = text.replace('{', '{{').replace('}', '}}')
# when variable are found we will also set _need_format to True
text = self.__class__.RE_VAR.sub(self._set_vars, text)
# replace escaped @! characters with just @
text = text.replace('@!', '@')
# undo the escaping when formatting is not needed
if not self._need_format:
text = text.replace('{{', '{').replace('}}', '}')
return text
def _set_vars(self, m):
'''Set _need_format to True and return the variable wrapped in curly
braces so it can be formatted.'''
self._need_format = True
return '{' + m.group(1).replace('.', '-') + '}'
| mit | -3,713,339,057,923,147,000 | 31.408163 | 76 | 0.578086 | false | 4.020253 | false | false | false |
Velaya/gbol_solr | server/solr/collection1/conf/getCommonNames_multithread.py | 1 | 11230 | # -*- coding: UTF-8 -*-
###############################################
# getCommonNames_multithread.py
# Ver. 0.2 (multithreading support)
# Script to retrieve common names from the REST API
# http://openup.nhm-wien.ac.at/commonNames/
# of Uni Wien (Heimo Reiner)
# Christian Koehler, ZFMK: c.koehler@zfmk.de
###############################################
###############################################
# some definitions
###############################################
# debug mode (restrict number of results to x_debug_results, give additional info). Slower!
debug = False
x_debug_results = 100
# database connection:
db_host = "144.76.31.113"
db_user = "koehler_zfmk"
db_passwd = "zfmk_bonn"
db_db = "koehler_zfmk"
# number of worker threads to complete the processing. Value between 50 and 100 is recommended.
num_worker_threads = 190
# output file name
output_file_name = 'Synonyms_common_names.txt'
# Encoding for output file
encoding = "UTF-8"
# Output format. So far we only have 'solr synonym'
# I will add additional formats on request
output_format = 'solr synonym'
# List of wanted languages. Note: Webservice does not always use ISO country codes
# The webservice provides the following languages:
# None, Ain, Bokm\xe5l, Chi, Cze, Dut, Dutch, Dzo, Eng, English, Fre, French, Ger, German, Gre, Hin, Hindi, Hrv, Srp,
# Hun, Ita, Jpn (Kanji), Jpn (Katakana), Kas, Kas, Pan, Kor (Hangul), Mon, Nep, Nep (uncertain), Nor, Nynorsk, Pahari?,
# Pan, Pol, Por, Rus, Russian, Sinhala, Slo, Spa, Spainsh, Spanish, Srp, Swe, Tamil, Tuk, Tur, Urd, ces, dan, en, e,
# fas, fi, gl, heb, hocg, ir, mi, nld, rus, slk, sv, swe, uk, ukr, we
# Use "all" to get all languages
# example: languages = 'all'
languages = ['German', 'Ger', 'de', 'en', 'eng', 'English', 'Eng']
# END OF DEFINITIONS ############################
import Queue
import threading
import json
from datetime import datetime
from time import sleep
from random import randint
import requests
import MySQLdb
# input queue with all species
species_queue = Queue.Queue(maxsize=0)
# output queue with the retrieved synonyms
synonym_queue = Queue.Queue(maxsize=0)
def get_species_list(source='buildin'):
"""Get a list of species.
Data can be retrieved from database (source=db) or as an example list (source=buildin)"""
# Fixed list of some random species for testing without db connection
species_list = ['Turdus merula', ' Salix alba', 'Russula violacea', 'Russula violeipes', 'Russula virescens ',
'Russula viscida ', 'Russula xerampelina ', 'Russula zvarae ',
'Ruta angustifolia ', 'Ruta chalepensis ', 'Ruta fruticulosa ', 'Ruta graveolens ',
'Ruta linifolia ', 'Ruta montana ', 'Ruta patavina ', 'Ruta pinnata ', 'Ruta pubescens ',
'Ruthalicia eglandulosa ', 'Rutidea decorticata ', 'Rutidea smithii ', 'Rutilaria ',
'Rutilaria edentula ', 'Rutilaria epsilon longicornis', 'Schiedea obovata', 'Schiedea perlmanii',
'Schiedea sarmentosa', 'Schiekia orinocensis', 'Scabiosa africana', 'Scabiosa agrestis',
'Scabiosa albanensis', 'Scabiosa albescen', 'Scabiosa albocincta', 'Scabiosa alpina',
'Scabiosa altissima', 'Scabiosa argentea', 'Scabiosa arvensis', 'Scabiosa atropurpurea',
'Scabiosa attenuata', 'Scabiosa australis', 'Scariola alpestris', 'Salvia africana',
'Salvia discolor', 'Sanguisorba alpina']
if source == 'db':
species_list = []
db = MySQLdb.connect(host=db_host, user=db_user, passwd=db_passwd, db=db_db)
cur = db.cursor()
sql_statement = 'SELECT DISTINCT taxonAtomised.canonical FROM taxonAtomised'
if debug:
# in debug mode only some results
sql_statement = '%s LIMIT %s' % (sql_statement, x_debug_results)
cur.execute(sql_statement)
for row in cur.fetchall():
species_list.append(row[0])
return species_list
def get_synonym(species):
"""Look up the synonym for a species from the web service"""
# give the webservice a break :-)
sleep(randint(2, 6))
url = 'http://openup.nhm-wien.ac.at/commonNames/?query={"type":"/name/common","query":"%s"}' % species
json_data = requests.get(url).text
if len(json_data) < 20 or "result" not in json.loads(json_data):
# an 'empty' response may contain something like {u'result': []}
return None
if len(json_data) > 20 and "result" not in json.loads(json_data):
        # trying to identify broken responses; bail out so the worker thread
        # does not hit a KeyError below
        print "ERROR in get_synonym: %s: %s bytes returned without 'result': %s" % (species, len(json_data), json.loads(json_data))
        return None
results = json.loads(json_data)['result']
common_name_dict = {}
for i in results:
if languages == 'all' or i['language'] in languages:
# only exact matches marked with "match" (webservice provides fuzzy search, too)
if i['match']:
if i['language'] not in common_name_dict.keys():
common_name_dict[i['language']] = []
if i['name'] not in common_name_dict[i['language']]:
common_name_dict[i['language']].append(i['name'])
entry = ''
for language in common_name_dict.keys():
for synonym in common_name_dict[language]:
# add new synonym, if it does not contain a comma (like 'Melon, Water')
if synonym not in entry and synonym.find(',') == -1:
# clean up a bit (get rid of commas, strip trailing spaces, remove double spaces)
entry = '%s %s,' % (entry, synonym.strip().replace(' ', ' '))
# append scientific name at the end (solr synonym style)
entry = ('%s %s' % (entry, species))
species_to_go = species_queue.qsize()
print "Found for %s: %s \t\t (%s to go)" % (species, entry, species_to_go,)
return entry.strip()
def get_available_languages():
"""Return a list of available translation language of the webservice.
For debugging only! This takes some time ... be patient.
In debug mode only some species (x_debug_results) are inspected."""
language_list = []
species_list = get_species_list(source='db')
if debug:
print species_list
number_of_species = len(species_list)
print '%s species in list' % number_of_species
print 'Inspecting ... starting count down: ',
for species in species_list:
if debug:
number_of_species -= 1
print ('%s ... ' % number_of_species),
# sometimes we have invalid species names (None, empty string) in DB
if species:
url = 'http://openup.nhm-wien.ac.at/commonNames/?query={"type":"/name/common","query":"%s"}' % species
json_data = requests.get(url).text
results = json.loads(json_data)['result']
for i in results:
if i and i['language'] not in language_list:
language_list.append(i['language'])
return sorted(language_list)
# another queued thread we will use to print output
def file_writer():
"""Asynchron writing synonyms to file from queue.
Note: the functions does not implement semaphores or other file locking. So it is not thread safe (yet).
Multiple threads for writing to file does not make sense here, as this task is 1000 times faster than
the data retrieval from the REST api"""
while True:
# when the worker puts stuff in the output queue, write them to the file system
synonyms = synonym_queue.get()
output_file = open(output_file_name, 'a', 1)
try:
# only append to list, if we have at least one synonym
if synonyms and synonyms.find(',') > 0:
data = '%s\n' % synonyms.encode(encoding)
output_file.write(data)
if debug:
print 'Writing: %s \t(%s in queue)' % (synonyms, synonym_queue.qsize())
except:
data = '# ERROR: Encoding Error: %s\n' % synonyms
output_file.write(data)
if debug:
print data
output_file.close()
synonym_queue.task_done()
def write_file_header(file_format):
"""Write a header for the output file. I only implemented the "solr synonym" so far"""
output_file = open(output_file_name, 'w', 1)
# solr synonym file
if file_format == 'solr synonym':
comment_marker = '#'
# all other formats
else:
comment_marker = '# //'
output_file.write('%s Common Name Synonym List\n' % comment_marker)
output_file.write('%s Version 0.2 mt\n' % comment_marker)
output_file.write('%s Format: %s\n' % (comment_marker, file_format))
output_file.write('%s Languages: %s\n' % (comment_marker, languages))
if debug:
output_file.write('%s Available Languages: %s\n' % (comment_marker, get_available_languages()))
output_file.write('%s Encoding: %s\n' % (comment_marker, encoding))
output_file.write('%s Date: %s\n' % (comment_marker, datetime.now().strftime("%d/%m/%Y (%H:%M)")))
output_file.write('%s Christian Koehler (koehler@zfmk.de)\n' % comment_marker)
if debug:
output_file.write('%s Debug mode!)\n' % comment_marker)
output_file.write('\n')
output_file.close()
def worker():
"""Process that each worker thread will execute until the species_queue is empty"""
while True:
# get item from queue, do work on it, let queue know processing is done for one item
item = species_queue.get()
synonym_queue.put(get_synonym(item))
species_queue.task_done()
# launch all of our queued processes
def main():
# prepare the output file
write_file_header(output_format)
# Launches a number of worker threads to perform operations using the queue of inputs
for i in range(num_worker_threads):
t = threading.Thread(target=worker)
t.daemon = True
t.start()
# launches a single "printer" thread to output the result (makes things neater)
t = threading.Thread(target=file_writer)
t.daemon = True
t.start()
# populate species_queue
species_list = get_species_list('db')
for species in species_list:
# there are some empty or broken enties
if species is not None and len(species) > 6:
species_queue.put(species)
# wait for the two queues to be emptied (and workers to close)
species_queue.join() # block until all tasks are done
print "Got all data from REST api"
synonym_queue.join()
# Some info at the end
output_file = open(output_file_name, 'a', 1)
output_file.write('# Finished Processing: Date: %s\n' % (datetime.now().strftime("%d/%m/%Y (%H:%M)")))
output_file.write('######## E O F ##########')
output_file.close()
print "Processing and writing complete"
main()
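# Illustrative output sketch (species and names assumed): each line written by
# file_writer() is a comma-separated list of common names with the scientific
# name appended last, which is the "solr synonym" layout, e.g.
#
#   Amsel, Blackbird, Turdus merula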
| apache-2.0 | 1,703,331,227,198,309,400 | 41.02682 | 120 | 0.603829 | false | 3.515967 | false | false | false |
ageron/tensorflow | tensorflow/contrib/opt/python/training/reg_adagrad_optimizer.py | 39 | 3782 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RegAdagrad for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import math_ops
from tensorflow.python.training import adagrad
from tensorflow.python.training import training_ops
from tensorflow.python.util import tf_contextlib
class RegAdagradOptimizer(adagrad.AdagradOptimizer):
"""RegAdagrad: Adagrad with updates that optionally skip updating the slots.
This is meant to address the problem of additional regularization terms in the
loss function affecting learning rate decay and causing hyper-param
entanglement. Example usage:
loss = tf.nn.cross_entropy(x, labels)
reg_loss = reg_strength * tf.reduce_sum(x * x)
opt = tf.contrib.opt.RegAdagradOptimizer(learning_rate)
loss_update = opt.minimize(loss)
with opt.avoid_updating_slots():
reg_update = opt.minimize(reg_loss)
total_update = tf.group([loss_update, reg_update])
# ...
sess.run(total_update, ...)
"""
def __init__(self,
learning_rate,
initial_accumulator_value=0.1,
use_locking=False,
name="RegAdagrad"):
super(RegAdagradOptimizer, self).__init__(
learning_rate,
initial_accumulator_value=initial_accumulator_value,
use_locking=use_locking,
name=name)
self._should_update_slots = True
@tf_contextlib.contextmanager
def avoid_updating_slots(self):
old = self._should_update_slots
self._should_update_slots = False
try:
yield
finally:
self._should_update_slots = old
def _apply_dense(self, grad, var):
acc = self.get_slot(var, "accumulator")
return training_ops.apply_adagrad(
var,
acc,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad,
use_locking=self._use_locking,
update_slots=self._should_update_slots)
def _resource_apply_dense(self, grad, var, update_slots=True):
acc = self.get_slot(var, "accumulator")
return training_ops.resource_apply_adagrad(
var.handle,
acc.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype.base_dtype),
grad,
use_locking=self._use_locking,
update_slots=self._should_update_slots)
def _apply_sparse(self, grad, var, update_slots=True):
acc = self.get_slot(var, "accumulator")
return training_ops.sparse_apply_adagrad(
var,
acc,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad.values,
grad.indices,
use_locking=self._use_locking,
update_slots=self._should_update_slots)
def _resource_apply_sparse(self, grad, var, indices, update_slots=True):
acc = self.get_slot(var, "accumulator")
return training_ops.resource_sparse_apply_adagrad(
var.handle,
acc.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype),
grad,
indices,
use_locking=self._use_locking,
update_slots=self._should_update_slots)
| apache-2.0 | -2,308,720,905,346,034,000 | 34.345794 | 80 | 0.665256 | false | 3.875 | false | false | false |
semolex/astropylis | telescopes.py | 1 | 12316 | # -*- coding: utf-8 -*-
# TODO: convert some methods to numeric attributes.
from decimal import Decimal, getcontext
getcontext().prec = 3
class Telescope(object):
"""
Simple class that represents user's abstract telescope.
"""
__count = 0
def __init__(self, aperture, focal_length, name=None, brand=None, model=None, **kwargs):
"""
Creates instance of the abstract telescope with minimal required params.
At least focal_length and aperture params must be present.
Name can be generated automatically in form 'Telescope #n' where n = number of created telescopes.
`eyepieces` attribute is created for holding eyepieces for defined telescope.
`additional_info` attribute can be used for holding user-defined data, for example mount or viewfinder type etc.
:param focal_length: focal length of the telescope in millimeters
:type focal_length: int
:param aperture: aperture (diameter) of the telescope in millimeters
:type aperture: int
:param name: name of the telescope
:type name: str
:param brand: brand (merchant) of the telescope
:type brand: str
:param model: model name of the telescope
:type model: str
"""
Telescope.__count += 1
if not name:
name = 'Telescope #{}'.format(Telescope.__count)
self.name = name
self.brand = brand
self.model = model
self.focal_length = Decimal(focal_length)
self.aperture = Decimal(aperture)
self.eyepieces = {}
self.additional_info = {}
for key, value in kwargs.iteritems():
self.additional_info[key] = value
def __repr__(self):
return ('{0}({1}, {2}, {3}, {4}, {5})'.format(self.__class__.__name__, self.name, self.brand, self.model,
self.focal_length, self.aperture))
def __str__(self):
return 'class: {0}, name: {1}, brand: {2}, model: {3}, focal length: {4}, aperture: {5}'.format(
self.__class__.__name__, self.name, self.brand, self.model,
self.focal_length, self.aperture)
def add_eyepiece(self, focal_length, name=None, brand=None, afov=None):
"""
Method that adds eyepiece representation into `self.eyepieces` attribute for further calculations.
If `name` param is not passed, it will generate default name for the eyepiece in form 'Eyepiece #n`,
where n = number of eyepieces + 1.
:param focal_length: focal length of the eyepiece im millimeters
:type focal_length: int
:param name: name of the eyepiece, used as key for dict with eyepiece representation
:type name: str
:param brand: brand of the eyepiece
:type brand: str
:param afov: field of view of the eyepiece in degrees
:type afov: int
:Example:
>>> import pprint
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> myscope.add_eyepiece(focal_length=25, name='MyOcular', brand='SuperBrand', afov=50)
>>> pprint.pprint(myscope.eyepieces)
{'MyOcular': {'afov': 50, 'brand': 'SuperBrand', 'focal_length': 25}}
>>> myscope.add_eyepiece(focal_length=10, brand='Custom', afov=50)
>>> myscope.add_eyepiece(focal_length=20, brand='Custom', afov=50)
>>> pprint.pprint(myscope.eyepieces)
{'Eyepiece #2': {'afov': 50, 'brand': 'Custom', 'focal_length': 10},
'Eyepiece #3': {'afov': 50, 'brand': 'Custom', 'focal_length': 20},
'MyOcular': {'afov': 50, 'brand': 'SuperBrand', 'focal_length': 25}}
"""
if not name:
name = 'Eyepiece #{}'.format(len(self.eyepieces) + 1)
self.eyepieces[name] = {'focal_length': focal_length, 'brand': brand, 'afov': afov}
def get_dawes_limit(self, numeric=False):
"""
Method that calculates theoretical Dawes limit for telescope.
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal value in arc seconds
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> print(myscope.get_dawes_limit())
1.16"
>>> print(myscope.get_dawes_limit(numeric=True))
1.16
"""
resolution = Decimal(116) / self.aperture
if numeric:
return resolution
return '{}"'.format(resolution)
def get_rayleigh_criterion(self, numeric=False):
"""
Method that calculates theoretical Rayleigh criterion for telescope.
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal value in arc seconds
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> print(myscope.get_rayleigh_criterion())
1.38"
>>> print(myscope.get_rayleigh_criterion(numeric=True))
1.38
"""
resolution = Decimal(138) / self.aperture
if numeric:
return resolution
return '{}"'.format(resolution)
def get_exit_pupil(self, eyepiece, numeric=False):
"""
Method that calculates exit pupil for combination of telescope and eyepiece.
Eyepiece must be added via `add_eyepiece` method.
:param eyepiece: name of the eyepiece from `self.eyepieces` attribute
:type eyepiece: str
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal with exit pupil value in millimeters
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> myscope.add_eyepiece(focal_length=25, name='MyOcular', afov=50)
>>> print(myscope.get_exit_pupil(eyepiece='MyOcular'))
exit pupil: 2.5mm
>>> print(myscope.get_exit_pupil(eyepiece='MyOcular', numeric=True))
2.5
"""
exit_pupil = self.aperture / self.get_eyepiece_magnification(eyepiece, numeric=True)
if numeric:
return exit_pupil
return 'exit pupil: {}mm'.format(exit_pupil)
def get_eyepiece_magnification(self, eyepiece, numeric=False):
"""
Method that calculates magnification of the telescope combined with eyepiece.
Eyepiece must be added via `add_eyepiece` method.
:param eyepiece: name of the eyepiece from `self.eyepieces` attribute
:type eyepiece: str
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal with magnification value
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> myscope.add_eyepiece(focal_length=10, name='MyOcular')
>>> print(myscope.get_eyepiece_magnification(eyepiece='MyOcular'))
100X
>>> print(myscope.get_eyepiece_magnification(eyepiece='MyOcular', numeric=True))
100
"""
eyepiece_magnification = Decimal(self.focal_length / self.eyepieces[eyepiece]['focal_length'])
if numeric:
return eyepiece_magnification
return '{}X'.format(eyepiece_magnification)
def get_field_of_view(self, eyepiece, numeric=False):
"""
Method that calculates true field of view for combination of telescope and eyepiece.
Eyepiece must be added via `add_eyepiece` method.
:param eyepiece: name of the eyepiece from `self.eyepieces` attribute
:type eyepiece: str
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal with field of view value in degrees
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> myscope.add_eyepiece(focal_length=25, name='MyOcular', afov=50)
>>> print(myscope.get_field_of_view(eyepiece='MyOcular'))
FOV: 1.25
>>> print(myscope.get_field_of_view(eyepiece='MyOcular', numeric=True))
1.25
"""
magnification = self.get_eyepiece_magnification(eyepiece, numeric=True)
fov = self.eyepieces[eyepiece]['afov'] / magnification
if numeric:
return fov
return 'FOV: {}'.format(fov)
def get_focal_ratio(self, numeric=False):
"""
Method that calculates focal ratio of the telescope.
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal with f-number value
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> print(myscope.get_focal_ratio())
f/10
>>> print(myscope.get_focal_ratio(numeric=True))
10
"""
foc_ratio = self.focal_length / self.aperture
if numeric:
return foc_ratio
return 'f/{}'.format(foc_ratio)
def get_max_magnification(self, numeric=False):
"""
Method that calculates telescope's theoretical highest useful magnification.
:param numeric: if set to True, result will be returned as numeric value
:type numeric: bool
:return: string or Decimal with maximum magnification value
:rtype: str or Decimal
:Example:
>>> myscope = Telescope(name='My Scope', model='Super', brand='MegaScope', aperture=100, focal_length=1000)
>>> print(myscope.get_max_magnification())
200X
>>> print(myscope.get_max_magnification(numeric=True))
200
"""
max_magnification = self.aperture * 2
if numeric:
return max_magnification
return '{}X'.format(max_magnification)
def get_info(self):
"""
Method that calculates common specifications for the defined telescope.
:return: calculated common specifications
:rtype: dict
:Example:
>>> import pprint
>>> myscope = Telescope( aperture=100, focal_length=1000, name='My Scope', model='Super',\
brand='MegaScope', mount='EQ2', viewfinder='Red Dot')
>>> myscope.add_eyepiece(focal_length=25, name='MyOcular', brand='SuperBrand', afov=50)
>>> pprint.pprint(myscope.get_info())
{'additional info': {'mount': 'EQ2', 'viewfinder': 'Red Dot'},
'angular resolution (Dawes)': '1.16"',
'angular resolution (Rayleigh)': '1.38"',
'aperture': '100mm',
         'brand': 'MegaScope',
'eyepieces': {'MyOcular': {'afov': 50,
'brand': 'SuperBrand',
'focal_length': 25}},
'focal length': '1000mm',
'focal ratio': 'f/10',
'max magnification': '200X',
'name': 'My Scope'}
"""
info = {
'name': self.name,
            'brand': self.brand,
'focal length': '{}mm'.format(self.focal_length),
'aperture': '{}mm'.format(self.aperture),
'max magnification': self.get_max_magnification(),
'focal ratio': self.get_focal_ratio(),
'angular resolution (Dawes)': self.get_dawes_limit(),
'angular resolution (Rayleigh)': self.get_rayleigh_criterion(),
'eyepieces': self.eyepieces,
'additional info': self.additional_info
}
return info
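# Illustrative sketch (not part of the original module): combining the methods
# above to check an eyepiece against the telescope's highest useful magnification.
if __name__ == '__main__':
    scope = Telescope(aperture=150, focal_length=1200, name='Demo Scope')
    scope.add_eyepiece(focal_length=3, name='3mm', afov=50)
    mag = scope.get_eyepiece_magnification('3mm', numeric=True)
    if mag > scope.get_max_magnification(numeric=True):
        print('%s: %sX exceeds the useful limit' % (scope.name, mag))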
| mit | 3,952,873,404,401,258,000 | 41.615917 | 120 | 0.592238 | false | 3.862026 | false | false | false |
Ion-Petcu/StockTrainer | prices/service/stream.py | 1 | 1498 | import json
import time
import tornado.web
from pymongo import CursorType
from sse import Sse
from tornado.iostream import StreamClosedError
class StreamHandler(tornado.web.RequestHandler):
def initialize(self):
self.set_header('Content-Type', 'text/event-stream')
self.set_header('Cache-Control', 'no-cache')
self.set_header('X-Accel-Buffering', 'no')
self.sse = Sse()
self.stream = True
def on_connection_close(self):
self.stream = False
super().on_connection_close()
async def publish(self, message=None):
try:
if message is not None:
self.sse.add_message('message', message)
for item in self.sse:
self.write(item)
await self.flush()
except StreamClosedError:
self.stream = False
async def get(self):
# Send retry option to client
await self.publish()
ts = time.time() - 120 # last 2 minutes
collection = self.settings['db'].prices
cursor = collection.find({'ts': {'$gt': ts}}, cursor_type=CursorType.TAILABLE_AWAIT)
while self.stream:
if not cursor.alive:
cursor = collection.find({'ts': {'$gt': ts}}, cursor_type=CursorType.TAILABLE_AWAIT)
if (await cursor.fetch_next):
doc = cursor.next_object()
doc.pop('_id')
ts = doc['ts']
await self.publish(json.dumps(doc))
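# Illustrative consumer sketch (assuming the application maps this handler to
# /stream and listens on port 8888); kept as a comment so importing this module
# stays side-effect free.
#
#     import requests
#     with requests.get('http://localhost:8888/stream', stream=True) as resp:
#         for line in resp.iter_lines():
#             if line.startswith(b'data:'):
#                 print(line[5:].strip())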
| apache-2.0 | 6,930,875,075,670,951,000 | 30.208333 | 100 | 0.581442 | false | 4.070652 | false | false | false |
gridpp/dirac-getting-started | cernatschool/test_pixel.py | 3 | 1163 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#...the usual suspects.
import os, inspect
#...for the unit testing.
import unittest
#...for the logging.
import logging as lg
#...for the pixel wrapper class.
from pixel import Pixel
class PixelTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_create_pixel(self):
p = Pixel(100, 200, 1234, -1, 256, 256)
# The tests
#-----------
self.assertEqual(p.get_x(), 100)
self.assertEqual(p.get_y(), 200)
self.assertEqual(p.getX(), 51300)
self.assertEqual(p.getC(), 1234)
self.assertEqual(p.get_mask(), -1)
self.assertEqual(p.get_neighbours(), {})
self.assertEqual(p.pixel_entry(), "{\"x\":100, \"y\":200, \"c\":1234},\n")
if __name__ == "__main__":
lg.basicConfig(filename='log_test_pixel.txt', filemode='w', level=lg.DEBUG)
lg.info("")
lg.info("===============================================")
lg.info(" Logger output from cernatschool/test_pixel.py ")
lg.info("===============================================")
lg.info("")
unittest.main()
| mit | -7,315,147,021,007,849,000 | 22.26 | 82 | 0.527945 | false | 3.430678 | true | false | false |
ErickMurillo/ciat_plataforma | monitoreo/indicador06/migrations/0001_initial.py | 3 | 19870 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Maderable'
db.create_table(u'indicador06_maderable', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador06', ['Maderable'])
# Adding model 'Forrajero'
db.create_table(u'indicador06_forrajero', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador06', ['Forrajero'])
# Adding model 'Energetico'
db.create_table(u'indicador06_energetico', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador06', ['Energetico'])
# Adding model 'Frutal'
db.create_table(u'indicador06_frutal', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador06', ['Frutal'])
# Adding model 'ExistenciaArboles'
db.create_table(u'indicador06_existenciaarboles', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cantidad_maderable', self.gf('django.db.models.fields.IntegerField')()),
('cantidad_forrajero', self.gf('django.db.models.fields.IntegerField')()),
('cantidad_energetico', self.gf('django.db.models.fields.IntegerField')()),
('cantidad_frutal', self.gf('django.db.models.fields.IntegerField')()),
('encuesta', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['monitoreo.Encuesta'])),
))
db.send_create_signal(u'indicador06', ['ExistenciaArboles'])
# Adding M2M table for field maderable on 'ExistenciaArboles'
m2m_table_name = db.shorten_name(u'indicador06_existenciaarboles_maderable')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('existenciaarboles', models.ForeignKey(orm[u'indicador06.existenciaarboles'], null=False)),
('maderable', models.ForeignKey(orm[u'indicador06.maderable'], null=False))
))
db.create_unique(m2m_table_name, ['existenciaarboles_id', 'maderable_id'])
# Adding M2M table for field forrajero on 'ExistenciaArboles'
m2m_table_name = db.shorten_name(u'indicador06_existenciaarboles_forrajero')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('existenciaarboles', models.ForeignKey(orm[u'indicador06.existenciaarboles'], null=False)),
('forrajero', models.ForeignKey(orm[u'indicador06.forrajero'], null=False))
))
db.create_unique(m2m_table_name, ['existenciaarboles_id', 'forrajero_id'])
# Adding M2M table for field energetico on 'ExistenciaArboles'
m2m_table_name = db.shorten_name(u'indicador06_existenciaarboles_energetico')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('existenciaarboles', models.ForeignKey(orm[u'indicador06.existenciaarboles'], null=False)),
('energetico', models.ForeignKey(orm[u'indicador06.energetico'], null=False))
))
db.create_unique(m2m_table_name, ['existenciaarboles_id', 'energetico_id'])
# Adding M2M table for field frutal on 'ExistenciaArboles'
m2m_table_name = db.shorten_name(u'indicador06_existenciaarboles_frutal')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('existenciaarboles', models.ForeignKey(orm[u'indicador06.existenciaarboles'], null=False)),
('frutal', models.ForeignKey(orm[u'indicador06.frutal'], null=False))
))
db.create_unique(m2m_table_name, ['existenciaarboles_id', 'frutal_id'])
def backwards(self, orm):
# Deleting model 'Maderable'
db.delete_table(u'indicador06_maderable')
# Deleting model 'Forrajero'
db.delete_table(u'indicador06_forrajero')
# Deleting model 'Energetico'
db.delete_table(u'indicador06_energetico')
# Deleting model 'Frutal'
db.delete_table(u'indicador06_frutal')
# Deleting model 'ExistenciaArboles'
db.delete_table(u'indicador06_existenciaarboles')
# Removing M2M table for field maderable on 'ExistenciaArboles'
db.delete_table(db.shorten_name(u'indicador06_existenciaarboles_maderable'))
# Removing M2M table for field forrajero on 'ExistenciaArboles'
db.delete_table(db.shorten_name(u'indicador06_existenciaarboles_forrajero'))
# Removing M2M table for field energetico on 'ExistenciaArboles'
db.delete_table(db.shorten_name(u'indicador06_existenciaarboles_energetico'))
# Removing M2M table for field frutal on 'ExistenciaArboles'
db.delete_table(db.shorten_name(u'indicador06_existenciaarboles_frutal'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'configuracion.areaaccion': {
'Meta': {'object_name': 'AreaAccion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'configuracion.plataforma': {
'Meta': {'object_name': 'Plataforma'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'configuracion.sector': {
'Meta': {'object_name': 'Sector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'configuracion.sitioaccion': {
'Meta': {'object_name': 'SitioAccion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'indicador06.energetico': {
'Meta': {'object_name': 'Energetico'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'indicador06.existenciaarboles': {
'Meta': {'object_name': 'ExistenciaArboles'},
'cantidad_energetico': ('django.db.models.fields.IntegerField', [], {}),
'cantidad_forrajero': ('django.db.models.fields.IntegerField', [], {}),
'cantidad_frutal': ('django.db.models.fields.IntegerField', [], {}),
'cantidad_maderable': ('django.db.models.fields.IntegerField', [], {}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Encuesta']"}),
'energetico': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['indicador06.Energetico']", 'symmetrical': 'False'}),
'forrajero': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['indicador06.Forrajero']", 'symmetrical': 'False'}),
'frutal': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['indicador06.Frutal']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maderable': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['indicador06.Maderable']", 'symmetrical': 'False'})
},
u'indicador06.forrajero': {
'Meta': {'object_name': 'Forrajero'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'indicador06.frutal': {
'Meta': {'object_name': 'Frutal'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'indicador06.maderable': {
'Meta': {'object_name': 'Maderable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'lugar.comunidad': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Comunidad'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
u'lugar.departamento': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Departamento'},
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'})
},
u'lugar.municipio': {
'Meta': {'ordering': "['departamento__nombre', 'nombre']", 'object_name': 'Municipio'},
'departamento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'latitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'longitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'})
},
u'lugar.pais': {
'Meta': {'object_name': 'Pais'},
'codigo': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'mapeo.organizaciones': {
'Meta': {'ordering': "[u'nombre']", 'unique_together': "((u'font_color', u'nombre'),)", 'object_name': 'Organizaciones'},
'area_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.AreaAccion']"}),
'contacto': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'correo_electronico': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'departamento': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'direccion': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'font_color': ('mapeo.models.ColorField', [], {'unique': 'True', 'max_length': '10', 'blank': 'True'}),
'fundacion': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'generalidades': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': (u'sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'municipio': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'plataforma': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.Plataforma']"}),
'rss': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'sector': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.Sector']"}),
'siglas': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'sitio_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.SitioAccion']"}),
'sitio_web': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'telefono': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'temas': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'})
},
u'mapeo.persona': {
'Meta': {'object_name': 'Persona'},
'cedula': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'comunidad': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Comunidad']"}),
'departamento': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'edad': ('django.db.models.fields.IntegerField', [], {}),
'finca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nivel_educacion': ('django.db.models.fields.IntegerField', [], {}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizacion': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'org'", 'symmetrical': 'False', 'to': u"orm['mapeo.Organizaciones']"}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'sexo': ('django.db.models.fields.IntegerField', [], {})
},
u'monitoreo.encuesta': {
'Meta': {'object_name': 'Encuesta'},
'fecha': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jefe': ('django.db.models.fields.IntegerField', [], {}),
'productor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['mapeo.Persona']"}),
'recolector': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Recolector']"}),
'tipo_encuesta': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'monitoreo.recolector': {
'Meta': {'object_name': 'Recolector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['indicador06'] | mit | -65,981,591,300,281,150 | 66.588435 | 195 | 0.574836 | false | 3.360961 | true | false | false |
CanonicalLtd/ubuntu-image | ubuntu_image/tests/test_hooks.py | 1 | 5000 | """Test the hook mechanism."""
import os
from contextlib import ExitStack
from tempfile import TemporaryDirectory
from textwrap import dedent
from ubuntu_image.hooks import HookError, HookManager
from unittest import TestCase
class TestHooks(TestCase):
def test_hook_compatibility(self):
# This test should be updated whenever NEW hooks are added. It is NOT
# allowed to remove any hooks from this test - it's present here to
        # make sure no existing hooks have been removed.
pass
def test_hook_fired(self):
with ExitStack() as resources:
hooksdir = resources.enter_context(TemporaryDirectory())
hookfile = os.path.join(hooksdir, 'test-hook')
resultfile = os.path.join(hooksdir, 'result')
env = {'UBUNTU_IMAGE_TEST_ENV': 'true'}
with open(hookfile, 'w') as fp:
fp.write("""\
#!/bin/sh
echo -n "$UBUNTU_IMAGE_TEST_ENV" >>{}
""".format(resultfile))
os.chmod(hookfile, 0o744)
manager = HookManager([hooksdir])
manager.fire('test-hook', env)
# Check if the script ran once as expected.
self.assertTrue(os.path.exists(resultfile))
with open(resultfile, 'r') as fp:
self.assertEqual(fp.read(), 'true')
def test_hook_fired_multiple_scripts(self):
with ExitStack() as resources:
hooksdir = resources.enter_context(TemporaryDirectory())
hookdir = os.path.join(hooksdir, 'test-hook.d')
hookfile1 = os.path.join(hookdir, 'dir-test-01')
hookfile2 = os.path.join(hookdir, 'dir-test-02')
hookfile3 = os.path.join(hooksdir, 'test-hook')
resultfile = os.path.join(hooksdir, 'result')
os.mkdir(hookdir)
def create_hook(path):
with open(path, 'w') as fp:
fp.write(dedent("""\
#!/bin/sh
echo "{}" >>{}
""".format(path, resultfile)))
os.chmod(path, 0o744)
create_hook(hookfile1)
create_hook(hookfile2)
create_hook(hookfile3)
manager = HookManager([hooksdir])
manager.fire('test-hook')
# Check if all the scripts for the hook were run and in the right
# order.
self.assertTrue(os.path.exists(resultfile))
with open(resultfile, 'r') as fp:
lines = fp.read().splitlines()
self.assertListEqual(
lines, [hookfile1, hookfile2, hookfile3])
def test_hook_multiple_directories(self):
with ExitStack() as resources:
hooksdir1 = resources.enter_context(TemporaryDirectory())
hooksdir2 = resources.enter_context(TemporaryDirectory())
hookdir = os.path.join(hooksdir1, 'test-hook.d')
hookfile1 = os.path.join(hookdir, 'dir-test-01')
hookfile2 = os.path.join(hooksdir2, 'test-hook')
# We write the results to one file to check if order is proper.
resultfile = os.path.join(hooksdir1, 'result')
os.mkdir(hookdir)
def create_hook(path):
with open(path, 'w') as fp:
fp.write(dedent("""\
#!/bin/sh
echo "{}" >>{}
""".format(path, resultfile)))
os.chmod(path, 0o744)
create_hook(hookfile1)
create_hook(hookfile2)
manager = HookManager([hooksdir1, hooksdir2])
manager.fire('test-hook')
# Check if all the scripts for the hook were run and in the right
# order.
self.assertTrue(os.path.exists(resultfile))
with open(resultfile, 'r') as fp:
lines = fp.read().splitlines()
self.assertListEqual(
lines, [hookfile1, hookfile2])
def test_hook_error(self):
with ExitStack() as resources:
hooksdir = resources.enter_context(TemporaryDirectory())
hookfile = os.path.join(hooksdir, 'test-hook')
with open(hookfile, 'w') as fp:
fp.write(dedent("""\
#!/bin/sh
echo -n "error" 1>&2
exit 1
"""))
os.chmod(hookfile, 0o744)
manager = HookManager([hooksdir])
# Check if hook script failures are properly reported
with self.assertRaises(HookError) as cm:
manager.fire('test-hook')
self.assertEqual(cm.exception.hook_name, 'test-hook')
self.assertEqual(cm.exception.hook_path, hookfile)
self.assertEqual(cm.exception.hook_retcode, 1)
self.assertEqual(cm.exception.hook_stderr, 'error')
| gpl-3.0 | 5,770,392,537,122,771,000 | 42.103448 | 78 | 0.5396 | false | 4.248088 | true | false | false |
evensonbryan/yocto-autobuilder | lib/python2.7/site-packages/sqlalchemy_migrate-0.7.2-py2.7.egg/migrate/changeset/databases/postgres.py | 140 | 1129 | """
`PostgreSQL`_ database specific implementations of changeset classes.
.. _`PostgreSQL`: http://www.postgresql.org/
"""
from migrate.changeset import ansisql
from sqlalchemy.databases import postgresql as sa_base
PGSchemaGenerator = sa_base.PGDDLCompiler
class PGColumnGenerator(PGSchemaGenerator, ansisql.ANSIColumnGenerator):
"""PostgreSQL column generator implementation."""
pass
class PGColumnDropper(ansisql.ANSIColumnDropper):
"""PostgreSQL column dropper implementation."""
pass
class PGSchemaChanger(ansisql.ANSISchemaChanger):
"""PostgreSQL schema changer implementation."""
pass
class PGConstraintGenerator(ansisql.ANSIConstraintGenerator):
"""PostgreSQL constraint generator implementation."""
pass
class PGConstraintDropper(ansisql.ANSIConstraintDropper):
"""PostgreSQL constaint dropper implementation."""
pass
class PGDialect(ansisql.ANSIDialect):
columngenerator = PGColumnGenerator
columndropper = PGColumnDropper
schemachanger = PGSchemaChanger
constraintgenerator = PGConstraintGenerator
constraintdropper = PGConstraintDropper
| gpl-2.0 | 2,808,827,514,245,865,500 | 25.880952 | 72 | 0.777679 | false | 4.30916 | false | false | false |
Yadnss/yadnss | scripts/extract_files.py | 1 | 1658 | #!/usr/bin/env python3
from dnestpy import PAKArchive
from pathlib import Path
##########
# Config
##########
# Required game files
required_files = {
# Required game archive
'Resource00.pak': [
# Files needed from that archive
'resource/uistring/uistring.xml',
'resource/ui/mainbar/skillicon*.dds'
],
'Resource04.pak': [
'resource/ext/jobtable.dnt',
'resource/ext/skillleveltable_character*.dnt',
'resource/ext/skillleveltable_totalskill.dnt',
'resource/ext/skilltable_character.dnt',
'resource/ext/skilltreetable.dnt',
]
}
# Folder to extract files to
outdir = Path('./extract')
##########
# Utility functions
##########
def valid_dnpath(path):
"""Ensure needed dragonnest files are in the directory"""
return all((path/f).is_file() for f in required_files)
def match_any(pakfile, patternset):
"""Returns true if path matches any pattern from paternset"""
return any(pakfile.path.match(p) for p in patternset)
##########
# Main Script
##########
print('Enter your dragonnest game folder e.g., C:\\Nexon\\DragonNest')
dnpath = Path(input('DragonNest path: '))
while not valid_dnpath(dnpath):
print('\nGame files not found')
print('The folder must contain "Resource00.pak" and "Resource04.pak"')
dnpath = Path(input('DragonNest path: '))
# Extract required files
for pakname, filepatterns in required_files.items():
with PAKArchive(dnpath/pakname) as pak:
pakfiles = filter(lambda x: match_any(x, filepatterns), pak.files)
for pakfile in pakfiles:
pakfile.extract(outdir, fullpath=False, overwrite=True)
| mit | -824,540,858,606,843,400 | 27.586207 | 74 | 0.658625 | false | 3.397541 | false | false | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20170424A.py | 1 | 1796 | """
[2017-04-24] Challenge #312 [Easy] L33tspeak Translator
https://www.reddit.com/r/dailyprogrammer/comments/67dxts/20170424_challenge_312_easy_l33tspeak_translator/
# Description
L33tspeak - the act of speaking like a computer hacker (or hax0r) - was popularized in the late 1990s as a mechanism of
abusing ASCII art and character mappings to confuse outsiders. It was a lot of fun. [One popular comic
strip](http://megatokyo.com/strip/9) in 2000 showed just how far the joke ran.
In L33Tspeak you substitute letters for their rough outlines in ASCII characters, e.g. symbols or numbers. You can have
1:1 mappings (like E -> 3) or 1:many mappings (like W -> `//). So then you wind up with words like this:
BASIC => 6451C
ELEET => 31337 (pronounced elite)
WOW => `//0`//
MOM => (V)0(V)
## Mappings
For this challenge we'll be using a subset of American Standard Leetspeak:
A -> 4
B -> 6
E -> 3
I -> 1
L -> 1
M -> (V)
N -> (\)
O -> 0
S -> 5
T -> 7
V -> \/
W -> `//
Your challenge, should you choose to accept it, is to translate to and from L33T.
# Input Description
You'll be given a word or a short phrase, one per line, and asked to convert it from L33T or to L33T. Examples:
31337
storm
# Output Description
You should emit the translated words: Examples:
31337 -> eleet
storm -> 570R(V)
# Challenge Input
I am elite.
Da pain!
Eye need help!
3Y3 (\)33d j00 t0 g37 d4 d0c70r.
1 n33d m4 p1llz!
# Challenge Output
I am elite. -> 1 4m 37173
Da pain! -> D4 P41(\)!
Eye need help! -> 3Y3 (\)33D H31P!
3Y3 (\)33d j00 t0 g37 d4 d0c70r. -> Eye need j00 to get da doctor.
1 n33d m4 p1llz! -> I need ma pillz!
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | -1,675,895,513,782,828,000 | 29.440678 | 119 | 0.644766 | false | 2.8736 | false | false | false |
khazhyk/geoq | geoq/core/forms.py | 4 | 6590 | # -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django import forms
from django.forms.widgets import (RadioInput, RadioSelect, CheckboxInput,
CheckboxSelectMultiple)
from django.contrib.auth.models import User
from django.utils.html import escape, conditional_escape
from django.db.models import Max
from itertools import chain
from models import AOI, Job, Project
from maps.models import Layer, MapLayer
no_style = [RadioInput, RadioSelect, CheckboxInput, CheckboxSelectMultiple]
class StyledModelForm(forms.ModelForm):
"""
Adds the span5 (in reference to the Twitter Bootstrap element)
to form fields.
"""
cls = 'span5'
def __init__(self, *args, **kwargs):
super(StyledModelForm, self).__init__(*args, **kwargs)
for f in self.fields:
if type(self.fields[f].widget) not in no_style:
self.fields[f].widget.attrs['class'] = self.cls
class AOIForm(StyledModelForm):
class Meta:
fields = ('name', 'description', 'job', 'analyst',
'priority', 'status')
model = AOI
class ItemSelectWidget(forms.SelectMultiple):
def __init__(self, attrs=None, choices=(), option_title_field=''):
self.option_title_field = option_title_field
super(ItemSelectWidget, self).__init__(attrs, choices)
def render_option(self, selected_choices, option_value, option_label, option_title=''):
option_value = forms.util.force_text(option_value)
if option_value in selected_choices:
selected_html = u' selected="selected"'
if not self.allow_multiple_selected:
selected_choices.remove(option_value)
else:
selected_html = ''
return u'<option title="%s" value="%s"%s>%s</option>' % ( \
escape(option_title), escape(option_value), selected_html, conditional_escape(forms.util.force_text(option_label)))
def render_options(self, choices, selected_choices):
# Normalize to strings.
selected_choices = set(forms.util.force_text(v) for v in selected_choices)
choices = [(c[0], c[1], '') for c in choices]
more_choices = [(c[0], c[1]) for c in self.choices]
try:
option_title_list = [val_list[0] for val_list in self.choices.queryset.values_list(self.option_title_field)]
if len(more_choices) > len(option_title_list):
option_title_list = [''] + option_title_list # pad for empty label field
more_choices = [(c[0], c[1], option_title_list[more_choices.index(c)]) for c in more_choices]
except:
more_choices = [(c[0], c[1], '') for c in more_choices] # couldn't get title values
output = []
for option_value, option_label, option_title in chain(more_choices, choices):
if isinstance(option_label, (list, tuple)):
output.append(u'<optgroup label="%s">' % escape(forms.util.force_text(option_value)))
for option in option_label:
output.append(self.render_option(selected_choices, *option, **dict(option_title=option_title)))
output.append(u'</optgroup>')
else: # option_label is just a string
output.append(self.render_option(selected_choices, option_value, option_label, option_title))
return u'\n'.join(output)
class JobForm(StyledModelForm):
analysts = forms.ModelMultipleChoiceField(
queryset = User.objects.all(),
widget = ItemSelectWidget(option_title_field='email')
)
layers = forms.ModelMultipleChoiceField(
queryset = Layer.objects.all(),
widget = ItemSelectWidget()
)
class Meta:
fields = ('name', 'description', 'project', 'analysts',
'teams', 'reviewers', 'feature_types', 'required_courses', 'tags', 'layers')
model = Job
def __init__(self, project, *args, **kwargs):
super(JobForm, self).__init__(*args, **kwargs)
def remove_anonymous(field):
""" Removes anonymous from choices in form. """
field_var = self.fields[field].queryset.exclude(id=-1)
self.fields[field].queryset = field_var
return None
remove_anonymous('reviewers')
remove_anonymous('analysts')
self.fields['project'].initial = project
if 'data' in kwargs:
# If we're creating Job, we don't have a map
if self.instance.map == None:
return;
self.fields['analysts'].initial = kwargs['data'].getlist('analysts',None)
# must be a better way, but figure out the layers to display
layers_selected = set(kwargs['data'].getlist('layers',None))
layers_current_int = MapLayer.objects.filter(map=self.instance.map.id).values_list('layer_id', flat=True)
layers_current = set([unicode(i) for i in layers_current_int])
if layers_selected != layers_current:
# resolve differences
# first take out ones we want to remove
for x in layers_current - layers_selected:
MapLayer.objects.filter(map=self.instance.map.id,layer_id=x).delete()
# now add in new ones
layers = MapLayer.objects.filter(map=self.instance.map.id)
if layers.count() > 0:
max_stack_order = layers.aggregate(Max('stack_order')).values()[0]
else:
max_stack_order = 0
for x in layers_selected - layers_current:
max_stack_order+=1
ml = MapLayer.objects.create(map=self.instance.map,layer_id=int(x),stack_order=max_stack_order)
ml.save()
else:
if hasattr(kwargs['instance'],'analysts'):
self.fields['analysts'].initial = kwargs['instance'].analysts.all().values_list('id', flat=True)
else:
self.fields['analysts'].initial = []
if hasattr(kwargs['instance'],'map'):
self.fields['layers'].initial = [x.layer_id for x in kwargs['instance'].map.layers]
class ProjectForm(StyledModelForm):
class Meta:
fields = ('name', 'description', 'project_type', 'active', 'private')
model = Project
| mit | -5,319,748,840,823,283,000 | 43.829932 | 127 | 0.600303 | false | 4.023199 | false | false | false |
sunlightlabs/sarahs_inbox | mail_dedupe/views.py | 1 | 2307 | from settings import *
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseRedirect
from urllib import unquote
from mail.models import *
from django.core.urlresolvers import reverse
from django.core.cache import cache
import re
import jellyfish
from mail.management.commands.mail_combine_people import Command as CombineCommand
def index(request):
if not DEBUG:
return
DEFAULT_DISTANCE = 0
person_into = request.GET.get('into', False)
victims = map(lambda x: int(x), request.GET.getlist('combine'))
if person_into is not False:
victims.remove(int(person_into))
args_array = [person_into] + victims
# call_command('mail_combine_people', *args_array)
combcomm = CombineCommand()
print person_into, victims
result = combcomm.merge(person_into, victims, noprint=True)
people = []
for p in Person.objects.filter(merged_into=None).order_by('name_hash'):
people.append({'obj': p, 'dist': DEFAULT_DISTANCE})
target_person = None
target_id = request.GET.get('id', False)
if target_id is not False:
target_person = Person.objects.get(id=target_id)
if target_person:
for (i,p) in enumerate(people):
people[i]['dist'] = jellyfish.jaro_distance(target_person.name_hash, p['obj'].name_hash)
people.sort(key=lambda x: x['dist'], reverse=True)
total = len(people)
template_vars = {
'people': people,
'total': total
}
return render_to_response('dedupe.html', template_vars, context_instance=RequestContext(request))
def emails(request):
person = Person.objects.get(id=request.GET.get('id'))
from_emails = Email.objects.filter(creator=person)
to_emails = Email.objects.filter(to=person)
cc_emails = Email.objects.filter(cc=person)
template_vars = {
'from_emails': from_emails,
'to_emails': to_emails,
'cc_emails': cc_emails
}
return render_to_response('dedupe_emails.html', template_vars, context_instance=RequestContext(request))
| bsd-3-clause | 5,683,710,004,020,094,000 | 31.055556 | 108 | 0.654096 | false | 3.794408 | false | false | false |
karstenw/nodebox-pyobjc | examples/Extended Application/matplotlib/examples/units/ellipse_with_units.py | 1 | 2861 | """
==================
Ellipse With Units
==================
Compare the ellipse generated with arcs versus a polygonal approximation
.. only:: builder_html
This example requires :download:`basic_units.py <basic_units.py>`
"""
from basic_units import cm
import numpy as np
from matplotlib import patches
import matplotlib.pyplot as plt
# nodebox section
if __name__ == '__builtin__':
# were in nodebox
import os
import tempfile
W = 800
inset = 20
size(W, 600)
plt.cla()
plt.clf()
plt.close('all')
def tempimage():
fob = tempfile.NamedTemporaryFile(mode='w+b', suffix='.png', delete=False)
fname = fob.name
fob.close()
return fname
imgx = 20
imgy = 0
def pltshow(plt, dpi=300):
global imgx, imgy
temppath = tempimage()
plt.savefig(temppath, dpi=dpi)
dx,dy = imagesize(temppath)
w = min(W,dx)
image(temppath,imgx,imgy,width=w)
imgy = imgy + dy + 20
os.remove(temppath)
size(W, HEIGHT+dy+40)
else:
def pltshow(mplpyplot):
mplpyplot.show()
# nodebox section end
xcenter, ycenter = 0.38*cm, 0.52*cm
width, height = 1e-1*cm, 3e-1*cm
angle = -30
theta = np.deg2rad(np.arange(0.0, 360.0, 1.0))
x = 0.5 * width * np.cos(theta)
y = 0.5 * height * np.sin(theta)
rtheta = np.radians(angle)
R = np.array([
[np.cos(rtheta), -np.sin(rtheta)],
[np.sin(rtheta), np.cos(rtheta)],
])
x, y = np.dot(R, np.array([x, y]))
x += xcenter
y += ycenter
###############################################################################
fig = plt.figure()
ax = fig.add_subplot(211, aspect='auto')
ax.fill(x, y, alpha=0.2, facecolor='yellow',
edgecolor='yellow', linewidth=1, zorder=1)
e1 = patches.Ellipse((xcenter, ycenter), width, height,
angle=angle, linewidth=2, fill=False, zorder=2)
ax.add_patch(e1)
ax = fig.add_subplot(212, aspect='equal')
ax.fill(x, y, alpha=0.2, facecolor='green', edgecolor='green', zorder=1)
e2 = patches.Ellipse((xcenter, ycenter), width, height,
angle=angle, linewidth=2, fill=False, zorder=2)
ax.add_patch(e2)
fig.savefig('ellipse_compare')
###############################################################################
fig = plt.figure()
ax = fig.add_subplot(211, aspect='auto')
ax.fill(x, y, alpha=0.2, facecolor='yellow',
edgecolor='yellow', linewidth=1, zorder=1)
e1 = patches.Arc((xcenter, ycenter), width, height,
angle=angle, linewidth=2, fill=False, zorder=2)
ax.add_patch(e1)
ax = fig.add_subplot(212, aspect='equal')
ax.fill(x, y, alpha=0.2, facecolor='green', edgecolor='green', zorder=1)
e2 = patches.Arc((xcenter, ycenter), width, height,
angle=angle, linewidth=2, fill=False, zorder=2)
ax.add_patch(e2)
fig.savefig('arc_compare')
pltshow(plt)
| mit | 2,664,902,525,860,449,300 | 24.318584 | 82 | 0.580916 | false | 3.046858 | false | false | false |
nwjs/chromium.src | third_party/blink/tools/blinkpy/web_tests/port/browser_test_unittest.py | 2 | 4100 | # Copyright (C) 2014 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import optparse
from blinkpy.common import exit_codes
from blinkpy.common.system.executive_mock import MockExecutive
from blinkpy.web_tests.models import test_run_results
from blinkpy.web_tests.port import browser_test
from blinkpy.web_tests.port import browser_test_driver
from blinkpy.web_tests.port import port_testcase
class _BrowserTestTestCaseMixin(object):
def test_driver_name_option(self):
self.assertTrue(self.make_port()._path_to_driver().endswith(self.driver_name_endswith))
def test_default_timeout_ms(self):
self.assertEqual(self.make_port(options=optparse.Values({'configuration': 'Release'})).default_timeout_ms(),
self.timeout_ms)
self.assertEqual(self.make_port(options=optparse.Values({'configuration': 'Debug'})).default_timeout_ms(),
3 * self.timeout_ms)
def test_driver_type(self):
self.assertTrue(isinstance(self.make_port(options=optparse.Values({'driver_name': 'browser_tests'})
).create_driver(1), browser_test_driver.BrowserTestDriver))
def test_web_tests_dir(self):
self.assertTrue(self.make_port().web_tests_dir().endswith('chrome/test/data/printing/layout_tests'))
def test_virtual_test_suites(self):
# The browser_tests port do not use virtual test suites, so we are just testing the stub.
port = self.make_port()
self.assertEqual(port.virtual_test_suites(), [])
def test_path_to_apache_config_file(self):
pass
class BrowserTestLinuxTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
port_name = 'linux'
port_maker = browser_test.BrowserTestLinuxPort
os_name = 'linux'
os_version = 'trusty'
driver_name_endswith = 'browser_tests'
timeout_ms = 10 * 1000
class BrowserTestWinTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
port_name = 'win'
port_maker = browser_test.BrowserTestWinPort
os_name = 'win'
os_version = 'win7'
driver_name_endswith = 'browser_tests.exe'
timeout_ms = 20 * 1000
class BrowserTestMacTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
os_name = 'mac'
os_version = 'mac10.11'
port_name = 'mac'
port_maker = browser_test.BrowserTestMacPort
driver_name_endswith = 'browser_tests'
timeout_ms = 20 * 1000
def test_driver_path(self):
test_port = self.make_port(options=optparse.Values({'driver_name': 'browser_tests'}))
self.assertNotIn('.app/Contents/MacOS', test_port._path_to_driver())
| bsd-3-clause | 4,028,099,067,378,074,000 | 42.617021 | 116 | 0.723171 | false | 3.984451 | true | false | false |
MarsBighead/mustang | Python/rPolsh.py | 1 | 1094 | #!/usr/bin/python
import re
def calculate(num1,num2,op):
if op == "+":
return str(num1+num2)
elif op == "-":
return str(num1-num2)
elif op == "*":
return str(num1*num2)
elif op == "/" and num2>0:
return str(num1/num2)
def evalRPN(List):
    # Base case: exactly "operand operand operator" collapses to a single value.
    if len(List) == 3 and List[0].isdigit() and List[1].isdigit() and re.match('[\+\-\*\/]',List[2]):
        return calculate(int(List[0]),int(List[1]),List[2])
    else:
        preList=[]
        for i in range(len(List)-3):
            # Reduce the first "operand operand operator" triple found, keep the
            # rest of the expression, and recurse on the shorter token list.
            if List[i].isdigit() and List[i+1].isdigit() and re.match('[\+\-\*\/]',List[i+2]):
                preList.append(calculate(int(List[i]),int(List[i+1]),List[i+2]))
                preList.extend(List[i+3:])
                return evalRPN(preList)
            else:
                preList.append(List[i])
result = evalRPN(["1","2","+"])
print "Simple result: ",result
testList =[
["5","1","2","+","4","*","+","3","-"],
["4","13","5","/","+"],
["2","1","+","3","*"],
]
for testL in testList:
tResult = int(evalRPN(testL))
print "RPN result:\t",tResult
| mit | 3,895,570,864,865,920,000 | 26.35 | 102 | 0.5 | false | 2.89418 | false | false | false |
jeromecc/doctoctocbot | src/community/migrations/0001_initial.py | 1 | 1560 | # Generated by Django 2.2.6 on 2019-10-10 08:35
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('bot', '0001_initial'),
('crowdfunding', '0001_initial'),
('moderation', '0001_initial'),
('conversation', '0001_initial'),
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Community',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=101, unique=True)),
('active', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True)),
('account', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='bot.Account')),
('crowdfunding', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community', to='crowdfunding.Project')),
('hashtag', models.ManyToManyField(to='conversation.Hashtag')),
('membership', models.ManyToManyField(related_name='member_of', to='moderation.Category')),
('site', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='community', to='sites.Site')),
],
options={
'verbose_name_plural': 'communities',
},
),
]
| mpl-2.0 | -2,918,086,637,465,020,000 | 41.162162 | 161 | 0.594872 | false | 4.148936 | false | false | false |
NicovincX2/Python-3.5 | Algorithmique/Mathématiques discrètes/Théorie de l'information/Théorie des codes/Distance de Levenshtein/min_edit_distance.py | 1 | 1090 | # -*- coding: utf-8 -*-
import os
# min string edit distance dynamic programming example
def minEditDistance(s1, s2):
"""Compute minimum edit distance converting s1 -> s2"""
    len1 = len(s1)
    len2 = len(s2)
    # (len2 + 1) x (len1 + 1) cost matrix, filled row by row
    m = [None] * (len2 + 1)
for i in range(len2 + 1):
m[i] = [0] * (len1 + 1)
# set up initial costs on horizontal
for j in range(1, len1 + 1):
m[0][j] = j
# now prepare costs for vertical
for i in range(1, len2 + 1):
m[i][0] = i
# compute best
for i in range(1, len2 + 1):
for j in range(1, len1 + 1):
cost = 1
if s1[j - 1] == s2[i - 1]:
cost = 0
# cost of changing [i][j] character
# cost of removing character from sj
# cost of adding character to si
replaceCost = m[i - 1][j - 1] + cost
removeCost = m[i - 1][j] + 1
addCost = m[i][j - 1] + 1
m[i][j] = min(replaceCost, removeCost, addCost)
return m[len2][len1]
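# Hedged usage example (the sample strings below are illustrative, not from the
# original file): "kitten" -> "sitting" needs 3 single-character edits
# (substitute k->s, substitute e->i, insert g).
print(minEditDistance("kitten", "sitting"))  # prints 3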
os.system("pause")
| gpl-3.0 | 629,654,641,110,707,300 | 23.222222 | 59 | 0.500917 | false | 3.15942 | false | false | false |
anushbmx/kitsune | kitsune/questions/management/commands/update_weekly_votes.py | 1 | 1135 | from datetime import datetime, timedelta
from django.core.management.base import BaseCommand
from kitsune.questions.models import Question, QuestionVote
from kitsune.questions.tasks import update_question_vote_chunk
from kitsune.sumo.utils import chunked
class Command(BaseCommand):
help = "Keep the num_votes_past_week value accurate."
def handle(self, **options):
# Get all questions (id) with a vote in the last week.
recent = datetime.now() - timedelta(days=7)
q = QuestionVote.objects.filter(created__gte=recent)
q = q.values_list('question_id', flat=True).order_by('question')
q = q.distinct()
q_with_recent_votes = list(q)
# Get all questions with num_votes_past_week > 0
q = Question.objects.filter(num_votes_past_week__gt=0)
q = q.values_list('id', flat=True)
q_with_nonzero_votes = list(q)
# Union.
qs_to_update = list(set(q_with_recent_votes + q_with_nonzero_votes))
# Chunk them for tasks.
for chunk in chunked(qs_to_update, 50):
update_question_vote_chunk.apply_async(args=[chunk])
| bsd-3-clause | -2,490,800,972,994,052,000 | 35.612903 | 76 | 0.664317 | false | 3.557994 | false | false | false |
slobodz/TeamServices | project/server/lexicon/views.py | 1 | 8622 | # project/server/admin/views.py
#################
#### imports ####
#################
import datetime
from flask import render_template, Blueprint, url_for, \
redirect, flash, request, make_response, abort, jsonify
from flask_login import login_required
from werkzeug.debug import get_current_traceback
from sqlalchemy import and_
from .forms import LanguageForm, LanguageModForm, LexiconForm, LexiconSearchForm
from .. import db
from ..models import Lexicon, Language, Translation, Permission, ProductLexicon, Product
from ..config import BaseConfig
from ..utils import datetime_utcnow, Audit, get_value_with_fallback
from ..decorators import admin_required, permission_required
################
#### config ####
################
LEXICON_BLUEPRINT = Blueprint('lexicon', __name__,)
##################
#### language ####
##################
@LEXICON_BLUEPRINT.route('/language', methods=['GET', 'POST'])
@login_required
@permission_required(Permission.POWERUSER)
def language():
form = LanguageForm(request.form)
if form.validate_on_submit():
try:
lang = Language(
locale_code=form.locale_code.data,
description=form.description.data,
description_en=form.description_en.data
)
db.session.add(lang)
db.session.commit()
flash('Thank you for adding language.', 'success')
except:
db.session.rollback()
flash('Something went wrong when adding language.', 'danger')
languages = Language.query.all()
return render_template('lexicon/language.html', form=form, languages=languages)
@LEXICON_BLUEPRINT.route('/language/mod/<locale_code>', methods=['POST', 'GET'])
@LEXICON_BLUEPRINT.route('/language/mod/', methods=['POST'])
@login_required
@permission_required(Permission.POWERUSER)
def mod_language(locale_code=None):
if request.method == 'GET':
lang = Language.query.filter_by(locale_code=locale_code).first()
form = LanguageModForm(obj=lang)
if request.method == 'POST':
form = LanguageModForm(request.form)
locale_code=form.locale_code.data
lang = Language.query.filter_by(locale_code=locale_code).first()
#this will run for POST only
if form.validate_on_submit():
try:
lang.description=form.description.data
lang.description_en=form.description_en.data
lang.mod_date=datetime_utcnow()
lang.process_code=Audit.PROCESS_WEB
lang.process_status=Audit.STATUS_SUCCESS
db.session.commit()
flash('Thank you for saving language.', 'success')
except:
db.session.rollback()
flash('Something went wrong when saving language.', 'danger')
languages = Language.query.all()
return render_template('lexicon/language.html', form=form, languages=languages, locale_code=locale_code)
@LEXICON_BLUEPRINT.route('/language/del/<locale_code>', methods=['GET'])
@login_required
@permission_required(Permission.POWERUSER)
def del_language(locale_code):
lang = Language.query.filter_by(locale_code=locale_code).first()
try:
db.session.delete(lang)
db.session.commit()
flash('Language deleted.', 'info')
except:
track = get_current_traceback(skip=1, show_hidden_frames=True, ignore_system_exceptions=False)
track.log()
db.session.rollback()
flash('Something went wrong when deleting language.', 'danger')
return redirect(url_for('lexicon.language'))
##################
#### lexicon #####
##################
@LEXICON_BLUEPRINT.route('/items', methods=['GET'])
@LEXICON_BLUEPRINT.route('/items/<int:page>', methods=['GET'])
@login_required
@permission_required(Permission.POWERUSER)
def items(page=1):
"""Return all items."""
form = LexiconSearchForm()
description = get_value_with_fallback("lexicon_description")
if request.method == 'GET':
if description:
form.lexicon_description.data = description
lexicons = Lexicon.query.filter(Lexicon.description.like("%"+description+"%")).order_by(Lexicon.description).paginate(page,BaseConfig.PRODUCTS_PER_PAGE,error_out=False)
else:
lexicons = Lexicon.query.order_by(Lexicon.description).paginate(page,BaseConfig.PRODUCTS_PER_PAGE,error_out=False)
response = make_response(render_template('lexicon/items.html', form=form, lexicons=lexicons))
expires = datetime.datetime.now() + datetime.timedelta(days=365)
if description:
response.set_cookie("lexicon_description", description, expires=expires)
return response
@LEXICON_BLUEPRINT.route('/item', methods=['GET', 'POST'])
@login_required
@permission_required(Permission.POWERUSER)
def newitem():
"""Empty form setup."""
form = LexiconForm()
form.products.choices = []
return render_template('lexicon/item.html', form=form)
@LEXICON_BLUEPRINT.route('/item/<lexicon_id>', methods=['GET'])
@login_required
@permission_required(Permission.POWERUSER)
def item(lexicon_id):
"""display lexicon"""
lexicon = Lexicon.query.filter_by(lexicon_id=lexicon_id).first()
if lexicon:
form = LexiconForm(obj=lexicon)
for t in lexicon.translations:
form.locale_code.data=t.locale_code
form.translation_description.data=t.description
break
form.products.choices = [(p.product_id, p.product_code, True) for p in lexicon.products]
return render_template('lexicon/item.html', form=form, lexicon_item=lexicon)
@LEXICON_BLUEPRINT.route('/save', methods=['POST'])
@login_required
@permission_required(Permission.POWERUSER)
def save():
"""Add or modify Lexicon data"""
form = LexiconForm(request.form)
if form.validate_on_submit():
lexicon = Lexicon.query.filter_by(lexicon_id=form.lexicon_id.data).first()
translation = Translation.query.filter(and_(Translation.lexicon_id == form.lexicon_id.data, Translation.locale_code == form.locale_code.data)).first()
try:
if translation:
translation.description = form.translation_description.data
translation.mod_date = datetime_utcnow()
translation.process_code = Audit.PROCESS_WEB
translation.process_status = Audit.STATUS_SUCCESS
else:
translation = Translation(
locale_code=form.locale_code.data,
description=form.translation_description.data,
)
if lexicon:
if translation not in lexicon.translations:
lexicon.translations.append(translation)
lexicon.description=form.description.data
lexicon.tags=form.tags.data
                lexicon.lexicon_type_code=form.lexicon_type_code.data
                lexicon.mod_date=datetime_utcnow()
                lexicon.process_code=Audit.PROCESS_WEB
lexicon.process_status=Audit.STATUS_SUCCESS
else:
lexicon = Lexicon(
lexicon_type_code=form.lexicon_type_code.data,
description=form.description.data,
tags=form.tags.data
)
lexicon.translations.append(translation)
db.session.add(lexicon)
if form.products.data:
lexicon.products[:] = []
for product_id in form.products.data:
product = Product.query.filter_by(product_id=int(product_id)).first()
if product:
productlexicon = ProductLexicon(
lexicon_id=form.lexicon_id.data,
product_id=product.product_id,
product_code=product.product_code
)
lexicon.products.append(productlexicon)
form.products.choices = [(p.product_id, p.product_code, True) for p in lexicon.products]
else:
lexicon.products[:] = []
form.products.choices = []
flash('Thank you for saving lexicon.', 'success')
db.session.commit()
except:
track = get_current_traceback(skip=1, show_hidden_frames=True, ignore_system_exceptions=False)
track.log()
db.session.rollback()
flash('Something went wrong when adding new customer.', 'danger')
return render_template('lexicon/item.html', form=form)
| mit | -8,554,701,266,725,774,000 | 40.652174 | 180 | 0.623057 | false | 4.032741 | false | false | false |
clouserw/olympia | apps/amo/utils.py | 1 | 35130 | import chardet
import codecs
import collections
import contextlib
import datetime
import errno
import functools
import itertools
import operator
import os
import random
import re
import shutil
import time
import unicodedata
import urllib
import urlparse
import django.core.mail
from django import http
from django.conf import settings
from django.contrib import messages
from django.core import paginator
from django.core.cache import cache
from django.core.files.storage import (FileSystemStorage,
default_storage as storage)
from django.core.serializers import json
from django.core.validators import validate_slug, ValidationError
from django.forms.fields import Field
from django.http import HttpRequest
from django.template import Context, loader
from django.utils import translation
from django.utils.encoding import smart_str, smart_unicode
from django.utils.functional import Promise
from django.utils.http import urlquote
import bleach
import elasticutils.contrib.django as elasticutils
import html5lib
import jinja2
import pyes.exceptions as pyes
import pytz
from babel import Locale
from cef import log_cef as _log_cef
from django_statsd.clients import statsd
from easy_thumbnails import processors
from html5lib.serializer.htmlserializer import HTMLSerializer
from jingo import env
from PIL import Image, ImageFile, PngImagePlugin
import amo.search
from amo import ADDON_ICON_SIZES
from amo.urlresolvers import linkify_with_outgoing, reverse
from translations.models import Translation
from users.models import UserNotification
from users.utils import UnsubscribeCode
from . import logger_log as log
heka = settings.HEKA
days_ago = lambda n: datetime.datetime.now() - datetime.timedelta(days=n)
def urlparams(url_, hash=None, **query):
"""
    Add a fragment and/or query parameters to a URL.
    New query params will be appended to existing parameters, except duplicate
names, which will be replaced.
"""
url = urlparse.urlparse(url_)
fragment = hash if hash is not None else url.fragment
# Use dict(parse_qsl) so we don't get lists of values.
q = url.query
query_dict = dict(urlparse.parse_qsl(smart_str(q))) if q else {}
query_dict.update((k, v) for k, v in query.items())
query_string = urlencode([(k, v) for k, v in query_dict.items()
if v is not None])
new = urlparse.ParseResult(url.scheme, url.netloc, url.path, url.params,
query_string, fragment)
return new.geturl()
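# Usage sketch (URL and parameter values below are invented for illustration):
# urlparams('/search?q=firefox&page=2', hash='results', page=3, sort=None)
# keeps q, replaces the duplicate "page" with 3, drops the None-valued "sort"
# and appends the fragment, e.g. '/search?q=firefox&page=3#results'
# (parameter order may vary because an intermediate dict is used).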
def isotime(t):
"""Date/Time format according to ISO 8601"""
if not hasattr(t, 'tzinfo'):
return
return _append_tz(t).astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
def epoch(t):
"""Date/Time converted to seconds since epoch"""
if not hasattr(t, 'tzinfo'):
return
return int(time.mktime(_append_tz(t).timetuple()))
def _append_tz(t):
tz = pytz.timezone(settings.TIME_ZONE)
return tz.localize(t)
def sorted_groupby(seq, key):
"""
Given a sequence, we sort it and group it by a key.
key should be a string (used with attrgetter) or a function.
"""
if not hasattr(key, '__call__'):
key = operator.attrgetter(key)
return itertools.groupby(sorted(seq, key=key), key=key)
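# Usage sketch (variable names invented): group already-fetched objects by an
# attribute, e.g. for addon_type, group in sorted_groupby(addons, 'type'); the
# key may also be any callable, e.g. sorted_groupby(addons, lambda a: a.type.id).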
def paginate(request, queryset, per_page=20, count=None):
"""
Get a Paginator, abstracting some common paging actions.
If you pass ``count``, that value will be used instead of calling
``.count()`` on the queryset. This can be good if the queryset would
produce an expensive count query.
"""
p = (ESPaginator if isinstance(queryset, (amo.search.ES, elasticutils.S))
else paginator.Paginator)(queryset, per_page)
if count is not None:
p._count = count
# Get the page from the request, make sure it's an int.
try:
page = int(request.GET.get('page', 1))
except ValueError:
page = 1
# Get a page of results, or the first page if there's a problem.
try:
paginated = p.page(page)
except (paginator.EmptyPage, paginator.InvalidPage):
paginated = p.page(1)
paginated.url = u'%s?%s' % (request.path, request.GET.urlencode())
return paginated
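# Usage sketch (queryset invented for illustration): in a view,
# pager = paginate(request, Addon.objects.all(), per_page=30)
# returns a Page whose object_list is the current slice and whose .url keeps
# the request's query string for building pagination links.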
def send_mail(subject, message, from_email=None, recipient_list=None,
fail_silently=False, use_blacklist=True, perm_setting=None,
manage_url=None, headers=None, cc=None, real_email=False,
html_message=None, attachments=None, async=False,
max_retries=None):
"""
A wrapper around django.core.mail.EmailMessage.
Adds blacklist checking and error logging.
"""
from amo.helpers import absolutify
from amo.tasks import send_email
import users.notifications as notifications
if not recipient_list:
return True
if isinstance(recipient_list, basestring):
raise ValueError('recipient_list should be a list, not a string.')
# Check against user notification settings
if perm_setting:
if isinstance(perm_setting, str):
perm_setting = notifications.NOTIFICATIONS_BY_SHORT[perm_setting]
perms = dict(UserNotification.objects
.filter(user__email__in=recipient_list,
notification_id=perm_setting.id)
.values_list('user__email', 'enabled'))
d = perm_setting.default_checked
recipient_list = [e for e in recipient_list
if e and perms.setdefault(e, d)]
# Prune blacklisted emails.
if use_blacklist:
white_list = []
for email in recipient_list:
if email and email.lower() in settings.EMAIL_BLACKLIST:
log.debug('Blacklisted email removed from list: %s' % email)
else:
white_list.append(email)
else:
white_list = recipient_list
if not from_email:
from_email = settings.DEFAULT_FROM_EMAIL
if cc:
# If not basestring, assume it is already a list.
if isinstance(cc, basestring):
cc = [cc]
if not headers:
headers = {}
def send(recipient, message, **options):
kwargs = {
'async': async,
'attachments': attachments,
'cc': cc,
'fail_silently': fail_silently,
'from_email': from_email,
'headers': headers,
'html_message': html_message,
'max_retries': max_retries,
'real_email': real_email,
}
kwargs.update(options)
# Email subject *must not* contain newlines
args = (recipient, ' '.join(subject.splitlines()), message)
if async:
return send_email.delay(*args, **kwargs)
else:
return send_email(*args, **kwargs)
if white_list:
if perm_setting:
html_template = loader.get_template('amo/emails/unsubscribe.html')
text_template = loader.get_template('amo/emails/unsubscribe.ltxt')
if not manage_url:
manage_url = urlparams(absolutify(
reverse('users.edit', add_prefix=False)),
'acct-notify')
for recipient in white_list:
# Add unsubscribe link to footer.
token, hash = UnsubscribeCode.create(recipient)
unsubscribe_url = absolutify(reverse('users.unsubscribe',
args=[token, hash, perm_setting.short],
add_prefix=False))
context_options = {
'message': message,
'manage_url': manage_url,
'unsubscribe_url': unsubscribe_url,
'perm_setting': perm_setting.label,
'SITE_URL': settings.SITE_URL,
'mandatory': perm_setting.mandatory,
}
# Render this template in the default locale until
# bug 635840 is fixed.
with no_translation():
context = Context(context_options, autoescape=False)
message_with_unsubscribe = text_template.render(context)
if html_message:
context_options['message'] = html_message
with no_translation():
context = Context(context_options, autoescape=False)
html_with_unsubscribe = html_template.render(context)
result = send([recipient], message_with_unsubscribe,
html_message=html_with_unsubscribe,
attachments=attachments)
else:
result = send([recipient], message_with_unsubscribe,
attachments=attachments)
else:
result = send(recipient_list, message=message,
html_message=html_message, attachments=attachments)
else:
result = True
return result
def send_mail_jinja(subject, template, context, *args, **kwargs):
"""Sends mail using a Jinja template with autoescaping turned off.
Jinja is especially useful for sending email since it has whitespace
control.
"""
# Get a jinja environment so we can override autoescaping for text emails.
autoescape_orig = env.autoescape
env.autoescape = False
template = env.get_template(template)
msg = send_mail(subject, template.render(context), *args, **kwargs)
env.autoescape = autoescape_orig
return msg
def send_html_mail_jinja(subject, html_template, text_template, context,
*args, **kwargs):
"""Sends HTML mail using a Jinja template with autoescaping turned off."""
autoescape_orig = env.autoescape
env.autoescape = False
html_template = env.get_template(html_template)
text_template = env.get_template(text_template)
msg = send_mail(subject, text_template.render(context),
html_message=html_template.render(context), *args,
**kwargs)
env.autoescape = autoescape_orig
return msg
class JSONEncoder(json.DjangoJSONEncoder):
def default(self, obj):
from versions.models import ApplicationsVersions
unicodable = (Translation, Promise)
if isinstance(obj, unicodable):
return unicode(obj)
if isinstance(obj, ApplicationsVersions):
return {unicode(obj.application): {'min': unicode(obj.min),
'max': unicode(obj.max)}}
return super(JSONEncoder, self).default(obj)
def chunked(seq, n):
"""
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
seq = iter(seq)
while 1:
rv = list(itertools.islice(seq, 0, n))
if not rv:
break
yield rv
def urlencode(items):
"""A Unicode-safe URLencoder."""
try:
return urllib.urlencode(items)
except UnicodeEncodeError:
return urllib.urlencode([(k, smart_str(v)) for k, v in items])
def randslice(qs, limit, exclude=None):
"""
Get a random slice of items from ``qs`` of size ``limit``.
There will be two queries. One to find out how many elements are in ``qs``
and another to get a slice. The count is so we don't go out of bounds.
If exclude is given, we make sure that pk doesn't show up in the slice.
This replaces qs.order_by('?')[:limit].
"""
cnt = qs.count()
# Get one extra in case we find the element that should be excluded.
if exclude is not None:
limit += 1
rand = 0 if limit > cnt else random.randint(0, cnt - limit)
slice_ = list(qs[rand:rand + limit])
if exclude is not None:
slice_ = [o for o in slice_ if o.pk != exclude][:limit - 1]
return slice_
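# Usage sketch (queryset and pk invented): pick 3 random related objects while
# excluding the one currently shown, e.g.
# others = randslice(qs, limit=3, exclude=obj.pk)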
# Extra characters outside of alphanumerics that we'll allow.
SLUG_OK = '-_~'
def slugify(s, ok=SLUG_OK, lower=True, spaces=False, delimiter='-'):
# L and N signify letter/number.
# http://www.unicode.org/reports/tr44/tr44-4.html#GC_Values_Table
rv = []
for c in smart_unicode(s):
cat = unicodedata.category(c)[0]
if cat in 'LN' or c in ok:
rv.append(c)
if cat == 'Z': # space
rv.append(' ')
new = ''.join(rv).strip()
if not spaces:
new = re.sub('[-\s]+', delimiter, new)
return new.lower() if lower else new
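# Usage sketch (inputs invented): slugify(u'Mozilla Firefox 3.6!') gives
# u'mozilla-firefox-36'; slugify(u'Mozilla Firefox', spaces=True) keeps the
# space instead of inserting the delimiter.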
def slug_validator(s, ok=SLUG_OK, lower=True, spaces=False, delimiter='-',
message=validate_slug.message, code=validate_slug.code):
"""
Raise an error if the string has any punctuation characters.
Regexes don't work here because they won't check alnums in the right
locale.
"""
if not (s and slugify(s, ok, lower, spaces, delimiter) == s):
raise ValidationError(message, code=code)
def raise_required():
raise ValidationError(Field.default_error_messages['required'])
def clear_messages(request):
"""
Clear any messages out of the messages framework for the authenticated
user.
Docs: http://bit.ly/dEhegk
"""
for message in messages.get_messages(request):
pass
def clean_nl(string):
"""
This will clean up newlines so that nl2br can properly be called on the
cleaned text.
"""
html_blocks = ['{http://www.w3.org/1999/xhtml}blockquote',
'{http://www.w3.org/1999/xhtml}ol',
'{http://www.w3.org/1999/xhtml}li',
'{http://www.w3.org/1999/xhtml}ul']
if not string:
return string
def parse_html(tree):
# In etree, a tag may have:
# - some text content (piece of text before its first child)
# - a tail (piece of text just after the tag, and before a sibling)
# - children
# Eg: "<div>text <b>children's text</b> children's tail</div> tail".
# Strip new lines directly inside block level elements: first new lines
# from the text, and:
# - last new lines from the tail of the last child if there's children
# (done in the children loop below).
# - or last new lines from the text itself.
if tree.tag in html_blocks:
if tree.text:
tree.text = tree.text.lstrip('\n')
if not len(tree): # No children.
tree.text = tree.text.rstrip('\n')
# Remove the first new line after a block level element.
if tree.tail and tree.tail.startswith('\n'):
tree.tail = tree.tail[1:]
for child in tree: # Recurse down the tree.
if tree.tag in html_blocks:
# Strip new lines directly inside block level elements: remove
# the last new lines from the children's tails.
if child.tail:
child.tail = child.tail.rstrip('\n')
parse_html(child)
return tree
parse = parse_html(html5lib.parseFragment(string))
# Serialize the parsed tree back to html.
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(parse)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False)
return serializer.render(stream)
# From: http://bit.ly/eTqloE
# Without this, you'll notice a slight grey line on the edges of
# the adblock plus icon.
def patched_chunk_tRNS(self, pos, len):
i16 = PngImagePlugin.i16
s = ImageFile._safe_read(self.fp, len)
if self.im_mode == "P":
self.im_info["transparency"] = map(ord, s)
elif self.im_mode == "L":
self.im_info["transparency"] = i16(s)
elif self.im_mode == "RGB":
self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:])
return s
PngImagePlugin.PngStream.chunk_tRNS = patched_chunk_tRNS
def patched_load(self):
if self.im and self.palette and self.palette.dirty:
apply(self.im.putpalette, self.palette.getdata())
self.palette.dirty = 0
self.palette.rawmode = None
try:
trans = self.info["transparency"]
except KeyError:
self.palette.mode = "RGB"
else:
try:
for i, a in enumerate(trans):
self.im.putpalettealpha(i, a)
except TypeError:
self.im.putpalettealpha(trans, 0)
self.palette.mode = "RGBA"
if self.im:
return self.im.pixel_access(self.readonly)
Image.Image.load = patched_load
def resize_image(src, dst, size=None, remove_src=True, locally=False):
"""Resizes and image from src, to dst. Returns width and height.
When locally is True, src and dst are assumed to reside
on the local disk (not in the default storage). When dealing
with local files it's up to you to ensure that all directories
exist leading up to the dst filename.
"""
if src == dst:
raise Exception("src and dst can't be the same: %s" % src)
open_ = open if locally else storage.open
delete = os.unlink if locally else storage.delete
with open_(src, 'rb') as fp:
im = Image.open(fp)
im = im.convert('RGBA')
if size:
im = processors.scale_and_crop(im, size)
with open_(dst, 'wb') as fp:
im.save(fp, 'png')
if remove_src:
delete(src)
return im.size
def remove_icons(destination):
for size in ADDON_ICON_SIZES:
filename = '%s-%s.png' % (destination, size)
if storage.exists(filename):
storage.delete(filename)
class ImageCheck(object):
def __init__(self, image):
self._img = image
def is_image(self):
try:
self._img.seek(0)
self.img = Image.open(self._img)
# PIL doesn't tell us what errors it will raise at this point,
# just "suitable ones", so let's catch them all.
self.img.verify()
return True
except:
log.error('Error decoding image', exc_info=True)
return False
def is_animated(self, size=100000):
if not self.is_image():
return False
img = self.img
if img.format == 'PNG':
self._img.seek(0)
data = ''
while True:
chunk = self._img.read(size)
if not chunk:
break
data += chunk
acTL, IDAT = data.find('acTL'), data.find('IDAT')
if acTL > -1 and acTL < IDAT:
return True
return False
elif img.format == 'GIF':
# See the PIL docs for how this works:
# http://www.pythonware.com/library/pil/handbook/introduction.htm
try:
img.seek(1)
except EOFError:
return False
return True
class MenuItem():
"""Refinement item with nestable children for use in menus."""
url, text, selected, children = ('', '', False, [])
def to_language(locale):
"""Like django's to_language, but en_US comes out as en-US."""
# A locale looks like en_US or fr.
if '_' in locale:
return to_language(translation.trans_real.to_language(locale))
# Django returns en-us but we want to see en-US.
elif '-' in locale:
lang, region = locale.split('-')
return '%s-%s' % (lang, region.upper())
else:
return translation.trans_real.to_language(locale)
def get_locale_from_lang(lang):
"""Pass in a language (u'en-US') get back a Locale object courtesy of
Babel. Use this to figure out currencies, bidi, names, etc."""
# Special fake language can just act like English for formatting and such
if not lang or lang == 'dbg':
lang = 'en'
return Locale(translation.to_locale(lang))
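# Illustrative sketch (not part of the original module): once you have the
# Babel Locale, formatting data hangs off it; display_name is one example.
def _example_get_locale_from_lang():
    locale = get_locale_from_lang('fr')
    return locale.display_name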
class HttpResponseSendFile(http.HttpResponse):
def __init__(self, request, path, content=None, status=None,
content_type='application/octet-stream', etag=None):
self.request = request
self.path = path
super(HttpResponseSendFile, self).__init__('', status=status,
content_type=content_type)
if settings.XSENDFILE:
self[settings.XSENDFILE_HEADER] = path
if etag:
self['ETag'] = '"%s"' % etag
def __iter__(self):
if settings.XSENDFILE:
return iter([])
chunk = 4096
fp = open(self.path, 'rb')
if 'wsgi.file_wrapper' in self.request.META:
return self.request.META['wsgi.file_wrapper'](fp, chunk)
else:
self['Content-Length'] = os.path.getsize(self.path)
def wrapper():
while 1:
data = fp.read(chunk)
if not data:
break
yield data
return wrapper()
def redirect_for_login(request):
# We can't use urlparams here, because it escapes slashes,
# which a large number of tests don't expect
url = '%s?to=%s' % (reverse('users.login'),
urlquote(request.get_full_path()))
return http.HttpResponseRedirect(url)
def cache_ns_key(namespace, increment=False):
"""
Returns a key with namespace value appended. If increment is True, the
namespace will be incremented effectively invalidating the cache.
Memcache doesn't have namespaces, but we can simulate them by storing a
"%(key)s_namespace" value. Invalidating the namespace simply requires
editing that key. Your application will no longer request the old keys,
and they will eventually fall off the end of the LRU and be reclaimed.
"""
ns_key = 'ns:%s' % namespace
if increment:
try:
ns_val = cache.incr(ns_key)
except ValueError:
log.info('Cache increment failed for key: %s. Resetting.' % ns_key)
ns_val = epoch(datetime.datetime.now())
cache.set(ns_key, ns_val, 0)
else:
ns_val = cache.get(ns_key)
if ns_val is None:
ns_val = epoch(datetime.datetime.now())
cache.set(ns_key, ns_val, 0)
return '%s:%s' % (ns_val, ns_key)
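# Illustrative sketch (not part of the original module): the namespaced-key
# pattern this helper enables. 'addons' and ':featured' are made-up names.
def _example_cache_ns_key():
    ns = cache_ns_key('addons')
    cache.set('%s:featured' % ns, [1, 2, 3])
    featured = cache.get('%s:featured' % ns)
    # Bump the namespace: keys written under the old value are never read again.
    cache_ns_key('addons', increment=True)
    return featured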
def get_email_backend(real_email=False):
"""Get a connection to an email backend.
If settings.SEND_REAL_EMAIL is False, a debugging backend is returned.
"""
if real_email or settings.SEND_REAL_EMAIL:
backend = None
else:
backend = 'amo.mail.FakeEmailBackend'
return django.core.mail.get_connection(backend)
class ESPaginator(paginator.Paginator):
"""A better paginator for search results."""
# The normal Paginator does a .count() query and then a slice. Since ES
# results contain the total number of results, we can take an optimistic
# slice and then adjust the count.
def page(self, number):
# Fake num_pages so it looks like we can have results.
self._num_pages = float('inf')
number = self.validate_number(number)
self._num_pages = None
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
page = paginator.Page(self.object_list[bottom:top], number, self)
# Force the search to evaluate and then attach the count.
list(page.object_list)
self._count = page.object_list.count()
return page
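# Illustrative sketch (not part of the original module): `search` stands for
# any ES result object that supports slicing and .count(), which is all the
# paginator assumes.
def _example_es_paginator(search):
    page = ESPaginator(search, 20).page(1)
    return list(page.object_list)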
def smart_path(string):
"""Returns a string you can pass to path.path safely."""
if os.path.supports_unicode_filenames:
return smart_unicode(string)
return smart_str(string)
def log_cef(name, severity, env, *args, **kwargs):
"""Simply wraps the cef_log function so we don't need to pass in the config
dictionary every time. See bug 707060. env can be either a request
object or just the request.META dictionary"""
c = {'cef.product': getattr(settings, 'CEF_PRODUCT', 'AMO'),
'cef.vendor': getattr(settings, 'CEF_VENDOR', 'Mozilla'),
'cef.version': getattr(settings, 'CEF_VERSION', '0'),
'cef.device_version': getattr(settings, 'CEF_DEVICE_VERSION', '0'),
'cef.file': getattr(settings, 'CEF_FILE', 'syslog'), }
# The CEF library looks for some things in the env object like
# REQUEST_METHOD and any REMOTE_ADDR stuff. Django not only doesn't send
# half the stuff you'd expect, but it specifically doesn't implement
# readline on its FakePayload object so these things fail. I have no idea
# if that's outdated code in Django or not, but andym made this
# <strike>awesome</strike> less crappy so the tests will actually pass.
# In theory, the last part of this if() will never be hit except in the
# test runner. Good luck with that.
if isinstance(env, HttpRequest):
r = env.META.copy()
if 'PATH_INFO' in r:
r['PATH_INFO'] = env.build_absolute_uri(r['PATH_INFO'])
elif isinstance(env, dict):
r = env
else:
r = {}
if settings.USE_HEKA_FOR_CEF:
return heka.cef(name, severity, r, *args, config=c, **kwargs)
else:
return _log_cef(name, severity, r, *args, config=c, **kwargs)
@contextlib.contextmanager
def no_translation(lang=None):
"""
Activate the settings lang, or lang provided, while in context.
"""
old_lang = translation.trans_real.get_language()
if lang:
translation.trans_real.activate(lang)
else:
translation.trans_real.deactivate()
yield
translation.trans_real.activate(old_lang)
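# Illustrative sketch (not part of the original module): run a block with
# translations off, or with a specific locale forced, restoring the previous
# one afterwards.
def _example_no_translation():
    with no_translation():
        pass  # runs with translations deactivated
    with no_translation('fr'):
        pass  # runs with 'fr' active; the old locale comes back after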
def escape_all(v):
"""Escape html in JSON value, including nested items."""
if isinstance(v, basestring):
v = jinja2.escape(smart_unicode(v))
v = linkify_with_outgoing(v)
return v
elif isinstance(v, list):
for i, lv in enumerate(v):
v[i] = escape_all(lv)
elif isinstance(v, dict):
for k, lv in v.iteritems():
v[k] = escape_all(lv)
elif isinstance(v, Translation):
v = jinja2.escape(smart_unicode(v.localized_string))
return v
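# Illustrative sketch (not part of the original module): nested containers
# are walked recursively and mutated in place; strings are escaped and
# linkified.
def _example_escape_all():
    data = {'name': '<b>hi</b>', 'tags': ['<i>a</i>', 'b']}
    return escape_all(data)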
class LocalFileStorage(FileSystemStorage):
"""Local storage to an unregulated absolute file path.
Unregulated means that, unlike the default file storage, you can write to
any path on the system if you have access.
Unlike Django's default FileSystemStorage, this class behaves more like a
"cloud" storage system. Specifically, you never have to write defensive
code that prepares for leading directory paths to exist.
"""
def __init__(self, base_url=None):
super(LocalFileStorage, self).__init__(location='/', base_url=base_url)
def delete(self, name):
"""Delete a file or empty directory path.
Unlike the default file system storage this will also delete an empty
directory path. This behavior is more in line with other storage
systems like S3.
"""
full_path = self.path(name)
if os.path.isdir(full_path):
os.rmdir(full_path)
else:
return super(LocalFileStorage, self).delete(name)
def _open(self, name, mode='rb'):
if mode.startswith('w'):
parent = os.path.dirname(self.path(name))
try:
# Try/except to prevent race condition raising "File exists".
os.makedirs(parent)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(parent):
pass
else:
raise
return super(LocalFileStorage, self)._open(name, mode=mode)
def path(self, name):
"""Actual file system path to name without any safety checks."""
return os.path.normpath(os.path.join(self.location,
self._smart_path(name)))
def _smart_path(self, string):
if os.path.supports_unicode_filenames:
return smart_unicode(string)
return smart_str(string)
def strip_bom(data):
"""
Strip the BOM (byte order mark) from byte string `data`.
Returns a new byte string.
"""
for bom in (codecs.BOM_UTF32_BE,
codecs.BOM_UTF32_LE,
codecs.BOM_UTF16_BE,
codecs.BOM_UTF16_LE,
codecs.BOM_UTF8):
if data.startswith(bom):
data = data[len(bom):]
break
return data
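# Illustrative sketch (not part of the original module): a UTF-8 BOM is
# stripped, clean input comes back unchanged.
def _example_strip_bom():
    assert strip_bom(codecs.BOM_UTF8 + 'hello') == 'hello'
    assert strip_bom('hello') == 'hello'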
def smart_decode(s):
"""Guess the encoding of a string and decode it."""
if isinstance(s, unicode):
return s
enc_guess = chardet.detect(s)
try:
return s.decode(enc_guess['encoding'])
except (UnicodeDecodeError, TypeError), exc:
msg = 'Error decoding string (encoding: %r %.2f%% sure): %s: %s'
log.error(msg % (enc_guess['encoding'],
enc_guess['confidence'] * 100.0,
exc.__class__.__name__, exc))
return unicode(s, errors='replace')
def attach_trans_dict(model, objs):
"""Put all translations into a translations dict."""
# Get the ids of all the translations we need to fetch.
fields = model._meta.translated_fields
ids = [getattr(obj, f.attname) for f in fields
for obj in objs if getattr(obj, f.attname, None) is not None]
# Get translations in a dict, ids will be the keys. It's important to
# consume the result of sorted_groupby, which is an iterator.
qs = Translation.objects.filter(id__in=ids, localized_string__isnull=False)
all_translations = dict((k, list(v)) for k, v in
sorted_groupby(qs, lambda trans: trans.id))
def get_locale_and_string(translation, new_class):
"""Convert the translation to new_class (making PurifiedTranslations
and LinkifiedTranslations work) and return locale / string tuple."""
converted_translation = new_class()
converted_translation.__dict__ = translation.__dict__
return (converted_translation.locale.lower(),
unicode(converted_translation))
# Build and attach translations for each field on each object.
for obj in objs:
obj.translations = collections.defaultdict(list)
for field in fields:
t_id = getattr(obj, field.attname, None)
field_translations = all_translations.get(t_id, None)
if not t_id or field_translations is None:
continue
obj.translations[t_id] = [get_locale_and_string(t, field.rel.to)
for t in field_translations]
def rm_local_tmp_dir(path):
"""Remove a local temp directory.
This is just a wrapper around shutil.rmtree(). Use it to indicate you are
certain that your executing code is operating on a local temp dir, not a
directory managed by the Django Storage API.
"""
return shutil.rmtree(path)
def rm_local_tmp_file(path):
"""Remove a local temp file.
This is just a wrapper around os.unlink(). Use it to indicate you are
certain that your executing code is operating on a local temp file, not a
path managed by the Django Storage API.
"""
return os.unlink(path)
def timestamp_index(index):
"""Returns index-YYYYMMDDHHMMSS with the current time."""
return '%s-%s' % (index, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
def create_es_index_if_missing(index, config=None, aliased=False):
"""Creates an index if it's not present.
Returns the index name. It may change if it was aliased.
Options:
- index: name of the index.
- config: if provided, used as the settings option for the
ES calls.
- aliased: If set to true, the index is suffixed with a timestamp
and an alias with the index name is created.
"""
es = amo.search.get_es()
if aliased:
alias = index
try:
indices = es.get_alias(alias)
if len(indices) > 1:
raise ValueError("The %r alias should not point to "
"several indices" % index)
# we're good here - the alias and the index exist
return indices[0]
except pyes.IndexMissingException:
# no alias exists, so we want to
# create a fresh one and a fresh index
index = timestamp_index(index)
if settings.IN_TEST_SUITE:
if not config:
config = {}
# Be nice to ES running on ci.mozilla.org
config.update({'number_of_shards': 3,
'number_of_replicas': 0})
try:
es.create_index_if_missing(index, settings=config)
if aliased:
try:
es.add_alias(alias, [index])
except pyes.ElasticSearchException, exc:
log.info('ES error creating alias: %s' % exc)
except pyes.ElasticSearchException, exc:
log.info('ES error creating index: %s' % exc)
return index
def timer(*func, **kwargs):
"""
Outputs statsd timings for the decorated method, ignored if not
in test suite. It will give us a name that's based on the module name.
It will work without params. Or with the params:
key: a key to override the calculated one
test_only: only time while in test suite (default is True)
"""
key = kwargs.get('key', None)
test_only = kwargs.get('test_only', True)
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
if test_only and not settings.IN_TEST_SUITE:
return func(*args, **kw)
else:
name = (key if key else
'%s.%s' % (func.__module__, func.__name__))
with statsd.timer('timer.%s' % name):
return func(*args, **kw)
return wrapper
if func:
return decorator(func[0])
return decorator
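# Illustrative sketch (not part of the original module): both decoration
# styles the docstring describes; 'custom.key' is a made-up statsd key.
@timer
def _example_timed_default():
    return 42

@timer(key='custom.key', test_only=False)
def _example_timed_always():
    return 42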
def find_language(locale):
"""
Return a locale we support, or None.
"""
if not locale:
return None
LANGS = settings.AMO_LANGUAGES + settings.HIDDEN_LANGUAGES
if locale in LANGS:
return locale
# Check if locale has a short equivalent.
loc = settings.SHORTER_LANGUAGES.get(locale)
if loc:
return loc
# Check if locale is something like en_US that needs to be converted.
locale = to_language(locale)
if locale in LANGS:
return locale
return None
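# Illustrative sketch (not part of the original module): the three lookup
# paths above; actual results depend on settings.AMO_LANGUAGES and friends.
def _example_find_language():
    return (find_language('en-US'),  # already a supported code
            find_language('en_US'),  # normalized via to_language()
            find_language('zz'))     # unsupported -> None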
def has_links(html):
"""Return True if links (text or markup) are found in the given html."""
# Call bleach.linkify to transform text links to real links, and add some
# content to the ``href`` attribute. If the result is different from the
# initial string, links were found.
class LinkFound(Exception):
pass
def raise_on_link(attrs, new):
raise LinkFound
try:
bleach.linkify(html, callbacks=[raise_on_link])
except LinkFound:
return True
return False
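# Illustrative sketch (not part of the original module): both bare URLs and
# markup links trip the LinkFound short-circuit.
def _example_has_links():
    assert has_links('see http://example.com')
    assert has_links('<a href="http://example.com">x</a>')
    assert not has_links('nothing to link here')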
| bsd-3-clause | 5,517,376,139,347,201,000 | 32.681687 | 79 | 0.605835 | false | 4.049568 | false | false | false |
vladan-m/ggrc-core | src/ggrc/models/product.py | 1 | 1733 | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
from ggrc import db
from sqlalchemy.orm import validates
from .mixins import deferred, BusinessObject, Timeboxed, CustomAttributable
from .object_control import Controllable
from .object_document import Documentable
from .object_objective import Objectiveable
from .object_owner import Ownable
from .object_person import Personable
from .object_section import Sectionable
from .relationship import Relatable
from .utils import validate_option
from .track_object_state import HasObjectState, track_state_for_class
class Product(HasObjectState,
CustomAttributable, Documentable, Personable, Objectiveable, Controllable,
Sectionable, Relatable, Timeboxed, Ownable, BusinessObject, db.Model):
__tablename__ = 'products'
kind_id = deferred(db.Column(db.Integer), 'Product')
version = deferred(db.Column(db.String), 'Product')
kind = db.relationship(
'Option',
primaryjoin='and_(foreign(Product.kind_id) == Option.id, '\
'Option.role == "product_type")',
uselist=False,
)
_publish_attrs = [
'kind',
'version',
]
_sanitize_html = [
'version',
]
@validates('kind')
def validate_product_options(self, key, option):
return validate_option(self.__class__.__name__, key, option, 'product_type')
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Product, cls).eager_query()
return query.options(orm.joinedload('kind'))
track_state_for_class(Product) | apache-2.0 | -5,752,924,752,019,698,000 | 31.716981 | 80 | 0.718984 | false | 3.702991 | false | false | false |
clclcocoro/MLwithGA | test/create_testdata.py | 1 | 3831 | #!/usr/bin/env python
import sys
import random
"""
Usage: create_testdata.py <interval> <sequence_length> [--randomscore]
<interval> integer that is bigger than 8
<sequence_length> integer that is bigger than 9
--randomscore score is sampled from discrete random distribution
binding residue randint(3, 5)
residues on both sides of a binding residue randint(-10, -8)
non-binding residue randint(-2, 0)
"""
# start is the first binding residue index.
# interval is the number of residues between binding residues.
def generate_pssm(start, sequence_length, interval, random_flag=False):
pssm = []
for i in xrange(sequence_length):
if i % interval == start:
if random_flag:
pssm.append(map(str, [random.randint(3, 5) for i in xrange(20)]))
else:
pssm.append(map(str, [1]*20))
elif i % interval == start-1 or i % interval == start+1:
if random_flag:
pssm.append(map(str, [random.randint(-10, -8) for i in xrange(20)]))
else:
pssm.append(map(str, [-1]*20))
else:
if random_flag:
pssm.append(map(str, [random.randint(-2, 0) for i in xrange(20)]))
else:
pssm.append(map(str, [0]*20))
return pssm
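# Illustrative sketch (not part of the original script): a small deterministic
# PSSM with a binding residue every 10 positions, the first one at index 1, so
# rows 1, 11 and 21 are scored '1' and their neighbours '-1'.
def _example_generate_pssm():
    pssm = generate_pssm(1, 30, 10, random_flag=False)
    return len(pssm), pssm[1][0], pssm[2][0]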
if sys.argv[1] == "-h" or sys.argv[1] == "-help" or sys.argv[1] == "--help":
print """
Usage: create_testdata.py <interval> <sequence_length> [--randomscore]
<interval> integer that is bigger than 8
<sequence_length> integer that is bigger than 9
--randomscore score is sampled from discrete random distribution
binding residue randint(3, 5)
residues on both sides of a binding residue randint(-10, -8)
non-binding residue randint(-2, 0)
"""
sys.exit(0)
interval = int(sys.argv[1])
if not interval > 8:
raise ValueError("<interval> must be bigger than 8")
interval += 1 # modify for xrange()
sequence_length = int(sys.argv[2])
if not sequence_length > 9:
raise ValueError("<sequence_length> must be bigger than 9")
random_flag = False
if len(sys.argv) == 4 and sys.argv[3] == "--randomscore":
random_flag = True
sequence_length = int(sys.argv[2])
bindres_file = "./bindingData.txt"
if random_flag:
pssms_file = "./pssms_random_score.txt"
else:
pssms_file = "./pssms_fixed_score.txt"
with open(bindres_file, "w") as fp:
startA = 1
startB = 2
startC = 3
binding_site_indexA = ' '.join(map(str, [i+startA for i in xrange(0, sequence_length, interval)]))
binding_site_indexB = ' '.join(map(str, [i+startB for i in xrange(0, sequence_length, interval)]))
binding_site_indexC = ' '.join(map(str, [i+startC for i in xrange(0, sequence_length, interval)]))
fp.write("http://purl.uniprot.org/uniprot/AAAAAA {}\n".format(binding_site_indexA))
fp.write("http://purl.uniprot.org/uniprot/BBBBBB {}\n".format(binding_site_indexB))
fp.write("http://purl.uniprot.org/uniprot/CCCCCC {}\n".format(binding_site_indexC))
with open(pssms_file, "w") as fp:
fp.write(">http://purl.uniprot.org/uniprot/AAAAAA\n")
pssm = '\n'.join(map('\t'.join, generate_pssm(startA, sequence_length, interval, random_flag)))
fp.write(pssm+"\n")
fp.write(">http://purl.uniprot.org/uniprot/BBBBBB\n")
pssm = '\n'.join(map('\t'.join, generate_pssm(startB, sequence_length, interval, random_flag)))
fp.write(pssm+"\n")
fp.write(">http://purl.uniprot.org/uniprot/CCCCCC\n")
pssm = '\n'.join(map('\t'.join, generate_pssm(startC, sequence_length, interval, random_flag)))
fp.write(pssm+"\n")
| mit | 3,714,160,811,061,304,300 | 43.034483 | 102 | 0.601149 | false | 3.334204 | false | false | false |
squisher/stella | stella/codegen.py | 1 | 6389 | #!/usr/bin/env python
# Copyright 2013-2015 David Mohr
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import llvmlite.ir as ll
import llvmlite.binding as llvm
import logging
import time
from . import tp
from . import ir
from . import exc
class CGEnv(object):
module = None
builder = None
class Program(object):
def __init__(self, module):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.module = module
self.module.translate()
self.cge = CGEnv()
self.cge.module = module
self.llvm = self.makeStub()
for _, func in self.module.namestore.all(ir.Function):
self.blockAndCode(func)
self.target_machine = llvm.Target.from_default_triple().create_target_machine()
logging.debug("Verifying... ")
self._llmod = None
def llmod(self):
if not self._llmod:
self._llmod = llvm.parse_assembly(str(self.module.llvm))
return self._llmod
def blockAndCode(self, impl):
func = impl.llvm
# create blocks
bb = func.append_basic_block("entry")
for bc in impl.bytecodes:
if bc.discard:
impl.remove(bc)
impl.log.debug("BLOCK skipped {0}".format(bc))
continue
newblock = ''
if bc in impl.incoming_jumps:
assert not bc.block
bc.block = func.append_basic_block(str(bc.loc))
bb = bc.block
newblock = ' NEW BLOCK (' + str(bc.loc) + ')'
else:
bc.block = bb
impl.log.debug("BLOCK'D {0}{1}".format(bc, newblock))
for ext_module in self.module.getExternalModules():
ext_module.translate(self.module.llvm)
impl.log.debug("Printing all bytecodes:")
impl.bytecodes.printAll(impl.log)
impl.log.debug("Emitting code:")
bb = None
cge = self.cge
for bc in impl.bytecodes:
try:
if bb != bc.block:
# new basic block, use a new builder
cge.builder = ll.IRBuilder(bc.block)
if bc.reachable:
bc.translate(cge)
impl.log.debug("TRANS'D {0}".format(bc.locStr()))
else:
# eliminate unreachable code, which may occur in the middle of a function
impl.log.debug("UNREACH {}".format(bc.locStr()))
except exc.StellaException as e:
e.addDebug(bc.debuginfo)
raise
def makeStub(self):
impl = self.module.entry
func_tp = ll.FunctionType(impl.result.type.llvmType(self.module), [])
func = ll.Function(self.module.llvm, func_tp, name=str(impl.function)+'__stub__')
bb = func.append_basic_block("entry")
builder = ll.IRBuilder(bb)
self.cge.builder = builder
for name, var in self.module.namestore.all(ir.GlobalVariable):
var.translate(self.cge)
llvm_args = [arg.translate(self.cge) for arg in self.module.entry_args]
call = builder.call(impl.llvm, llvm_args)
if impl.result.type is tp.Void:
builder.ret_void()
else:
builder.ret(call)
return func
def elapsed(self):
if self.start is None or self.end is None:
return None
return self.end - self.start
def optimize(self, opt):
if opt is not None:
logging.warn("Running optimizations level {0}... ".format(opt))
# TODO was build_pass_managers(tm, opt=opt, loop_vectorize=True, fpm=False)
pmb = llvm.create_pass_manager_builder()
pmb.opt_level = opt
pm = llvm.create_module_pass_manager()
pmb.populate(pm)
pm.run(self.llmod())
def destruct(self):
self.module.destruct()
del self.module
def __del__(self):
logging.debug("DEL {}: {}".format(repr(self), hasattr(self, 'module')))
def run(self, stats):
logging.debug("Preparing execution...")
import ctypes
import llvmlite
import os
_lib_dir = os.path.dirname(llvm.ffi.__file__)
clib = ctypes.CDLL(os.path.join(_lib_dir, llvmlite.utils.get_library_name()))
# Direct access as below mangles the name
# f = clib.__powidf2
f = getattr(clib, '__powidf2')
llvm.add_symbol('__powidf2', ctypes.cast(f, ctypes.c_void_p).value)
with llvm.create_mcjit_compiler(self.llmod(), self.target_machine) as ee:
ee.finalize_object()
entry = self.module.entry
ret_type = entry.result.type
logging.info("running {0}{1}".format(entry,
list(zip(entry.type_.arg_types,
self.module.entry_args))))
entry_ptr = ee.get_pointer_to_global(self.llmod().get_function(self.llvm.name))
ret_ctype = entry.result.type.Ctype()
if ret_type.on_heap:
ret_ctype = ctypes.POINTER(ret_ctype)
cfunc = ctypes.CFUNCTYPE(ret_ctype)(entry_ptr)
time_start = time.time()
retval = cfunc()
stats['elapsed'] = time.time() - time_start
for arg in self.module.entry_args:
arg.ctype2Python(self.cge) # may be a no-op if not necessary
retval = ret_type.unpack(retval)
logging.debug("Returning...")
self.destruct()
return retval
def getAssembly(self):
return self.target_machine.emit_assembly(self.llmod())
def getLlvmIR(self):
ret = self.module.getLlvmIR()
logging.debug("Returning...")
self.destruct()
return ret
| apache-2.0 | -4,684,738,077,236,187,000 | 30.628713 | 93 | 0.571451 | false | 3.858092 | false | false | false |
jeffmahoney/crash-python | contrib/xfs-analyze.py | 1 | 5408 | #!/usr/bin/python3
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
# bsc#1025860
# This script cross references items in the AIL with buffers and inodes
# locked in every task's stack
from crash.types.list import list_for_each_entry
from crash.util import container_of
import gdb
dentry_type = gdb.lookup_type('struct dentry')
ail_type = gdb.lookup_type('struct xfs_ail')
xfs_log_item_type = gdb.lookup_type('struct xfs_log_item')
xfs_inode_log_item_type = gdb.lookup_type('struct xfs_inode_log_item')
ail = gdb.Value(0xffff885e3b9e3a40).cast(ail_type.pointer()).dereference()
print ail
# This should go into a crash.types.rwsem
RWSEM_ACTIVE_MASK = 0xffffffffL
RWSEM_UNLOCKED_VALUE = 0
RWSEM_ACTIVE_BIAS = 1
RWSEM_WAITING_BIAS = 0xffffffff00000000
RWSEM_ACTIVE_READ_BIAS = 1
RWSEM_ACTIVE_WRITE_BIAS = 0xffffffff00000001
def inode_paths(inode):
for dentry in list_for_each_entry(inode['i_dentry'], dentry_type, ''):
names = [dentry['d_name']['name'].string()]
parent = dentry['d_parent']
while parent.address != parent['d_parent'].address:
names.insert(0, parent['d_name']['name'].string())
parent = parent['d_parent']
yield '/'.join(names)
def rwsem_read_trylock(rwsem):
count = int(rwsem['count']) & 0xffffffffffffffffL
if count == 0:
return True
if count & RWSEM_ACTIVE_WRITE_BIAS:
return False
if count >= 0:
return True
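# Sanity sketch (not part of the original script): plain dicts stand in for
# the gdb.Value structs here, just to show what the helper reports.
def _example_rwsem_trylock():
    assert rwsem_read_trylock({'count': RWSEM_UNLOCKED_VALUE})         # free
    assert not rwsem_read_trylock({'count': RWSEM_ACTIVE_WRITE_BIAS})  # writer held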
locked_inodes = {}
def check_item(item):
if item['li_type'] == 0x123b: # inode
iitem = container_of(item, xfs_inode_log_item_type, 'ili_item')
if iitem['ili_inode']['i_pincount']['counter'] > 0:
# print "<pinned {:16x}>".format(iitem['ili_inode'].address)
return 1
if not rwsem_read_trylock(iitem['ili_inode']['i_lock']['mr_lock']):
inode = iitem['ili_inode']['i_vnode'].address
# print "<locked {}>".format(inode)
print oct(int(inode['i_mode']))
if int(inode) in locked_inodes:
print "in AIL multiple times"
else:
locked_inodes[int(inode)] = iitem['ili_inode']
# for path in inode_paths(inode):
# print path
return 2
# print "<ok>"
elif item['li_type'] == 0x123c: # buffer
pass
else:
print "*** Odd type {}".format(item['li_type'])
return 0
# superblock ffff885e2ec11000
# fs_info 0xffff885e33f7e000
# m_ail 0xffff885e3b9e3a40
last_pushed = ail['xa_last_pushed_lsn']
target = ail['xa_target']
found = None
count = 0
last_lsn = 0
total = 0
for item in list_for_each_entry(ail['xa_ail'], xfs_log_item_type, 'li_ail'):
# xfsaild_push fast forwards to the last pushed before starting
# pushes are two (three, kind of) stages for inodes, which most of
# the ail list is for this report
# 1) attempt to push the inode item, which writes it back to its buffer
# 2) upon success, attempt to push the buffer
# 3) when the buffer is successfully written, the callback is called
# which removes the item from the list
# The list prior to last_pushed contains the items for which we're
# waiting on writeback.
if item['li_lsn'] < last_pushed:
count += 1
continue
if last_lsn == 0:
print "Skipped {} items before last_pushed ({})".format(count, last_pushed)
count = 0
elif item['li_lsn'] > target:
print "** Target LSN reached: {}".format(target)
break
total += 1
if last_lsn != item['li_lsn']:
if last_lsn != 0:
print "*** {:<4} total items for LSN {} ({} ready, {} pinned, {} locked)".format(count, last_lsn, ready, pinned, locked)
count = 0
# print "*** Processing LSN {}".format(item['li_lsn'])
pinned = 0
locked = 0
ready = 0
ret = check_item(item)
if ret == 1:
pinned += 1
elif ret == 2:
locked += 1
else:
if locked and ready == 0:
print "<{} locked>".format(locked)
ready += 1
last_lsn = item['li_lsn']
count += 1
# We only care about the first 100 items
if count > 104:
break
checked = 0
dead = 0
for thread in gdb.selected_inferior().threads():
thread.switch()
try:
f = gdb.selected_frame()
while True:
f = f.older()
fn = f.function()
if not fn:
break
if fn.name == '__fput':
fp = f.read_var('file')
inode = fp['f_path']['dentry']['d_inode']
checked += 1
if inode in locked_inodes:
print inode
break
if fn.name == 'vfs_create':
try:
inode = f.read_var('dir')
except ValueError as e:
print f
inode = None
checked += 1
if int(inode) in locked_inodes:
print "PID {} inode {}".format(thread.ptid, hex(int(inode)))
dead += 1
break
except gdb.error as e:
pass
print "Checked {} inodes in __fput or vfs_create".format(checked)
print "Total items processed: {}".format(total)
print "Total inodes tracked: {}".format(len(locked_inodes.keys()))
print "Total inodes locked and waiting: {}".format(dead)
| gpl-2.0 | 4,874,240,074,689,909,000 | 31.190476 | 132 | 0.575074 | false | 3.511688 | false | false | false |
joshuahoman/vivisect | vstruct/defs/windows/win_6_2_i386/ntoskrnl.py | 7 | 455587 | # Version: 6.2
# Architecture: i386
import vstruct
from vstruct.primitives import *
KPROCESS_STATE = v_enum()
KPROCESS_STATE.ProcessInMemory = 0
KPROCESS_STATE.ProcessOutOfMemory = 1
KPROCESS_STATE.ProcessInTransition = 2
KPROCESS_STATE.ProcessOutTransition = 3
KPROCESS_STATE.ProcessInSwap = 4
KPROCESS_STATE.ProcessOutSwap = 5
KPROCESS_STATE.ProcessAllSwapStates = 6
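# Illustrative helper (not part of the generated definitions): each v_enum
# above is a plain attribute container, so a value-to-name lookup for
# debugging can be built from its instance dict; this assumes v_enum stores
# members as ordinary instance attributes.
def _example_enum_name(enum, value):
    for name, val in vars(enum).items():
        if val == value:
            return name
    return None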
MI_STORE_BIT_TYPE = v_enum()
MI_STORE_BIT_TYPE.MiStoreBitTypeInStore = 0
MI_STORE_BIT_TYPE.MiStoreBitTypeEvicted = 1
MI_STORE_BIT_TYPE.MiStoreBitTypeMax = 2
IO_ALLOCATION_ACTION = v_enum()
IO_ALLOCATION_ACTION.KeepObject = 1
IO_ALLOCATION_ACTION.DeallocateObject = 2
IO_ALLOCATION_ACTION.DeallocateObjectKeepRegisters = 3
EX_GEN_RANDOM_DOMAIN = v_enum()
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainKernel = 0
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainFirst = 0
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainUserVisible = 1
EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainMax = 2
LOCK_OPERATION = v_enum()
LOCK_OPERATION.IoReadAccess = 0
LOCK_OPERATION.IoWriteAccess = 1
LOCK_OPERATION.IoModifyAccess = 2
CM_SHARE_DISPOSITION = v_enum()
CM_SHARE_DISPOSITION.CmResourceShareUndetermined = 0
CM_SHARE_DISPOSITION.CmResourceShareDeviceExclusive = 1
CM_SHARE_DISPOSITION.CmResourceShareDriverExclusive = 2
CM_SHARE_DISPOSITION.CmResourceShareShared = 3
KWAIT_BLOCK_STATE = v_enum()
KWAIT_BLOCK_STATE.WaitBlockBypassStart = 0
KWAIT_BLOCK_STATE.WaitBlockBypassComplete = 1
KWAIT_BLOCK_STATE.WaitBlockActive = 2
KWAIT_BLOCK_STATE.WaitBlockInactive = 3
KWAIT_BLOCK_STATE.WaitBlockAllStates = 4
PROCESSOR_CACHE_TYPE = v_enum()
PROCESSOR_CACHE_TYPE.CacheUnified = 0
PROCESSOR_CACHE_TYPE.CacheInstruction = 1
PROCESSOR_CACHE_TYPE.CacheData = 2
PROCESSOR_CACHE_TYPE.CacheTrace = 3
EVENT_TYPE = v_enum()
EVENT_TYPE.NotificationEvent = 0
EVENT_TYPE.SynchronizationEvent = 1
KSPIN_LOCK_QUEUE_NUMBER = v_enum()
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare0 = 0
KSPIN_LOCK_QUEUE_NUMBER.LockQueueExpansionLock = 1
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare2 = 2
KSPIN_LOCK_QUEUE_NUMBER.LockQueueSystemSpaceLock = 3
KSPIN_LOCK_QUEUE_NUMBER.LockQueueVacbLock = 4
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMasterLock = 5
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNonPagedPoolLock = 6
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCancelLock = 7
KSPIN_LOCK_QUEUE_NUMBER.LockQueueWorkQueueLock = 8
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoVpbLock = 9
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoDatabaseLock = 10
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCompletionLock = 11
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNtfsStructLock = 12
KSPIN_LOCK_QUEUE_NUMBER.LockQueueAfdWorkQueueLock = 13
KSPIN_LOCK_QUEUE_NUMBER.LockQueueBcbLock = 14
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMmNonPagedPoolLock = 15
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare16 = 16
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMaximumLock = 17
WHEA_ERROR_TYPE = v_enum()
WHEA_ERROR_TYPE.WheaErrTypeProcessor = 0
WHEA_ERROR_TYPE.WheaErrTypeMemory = 1
WHEA_ERROR_TYPE.WheaErrTypePCIExpress = 2
WHEA_ERROR_TYPE.WheaErrTypeNMI = 3
WHEA_ERROR_TYPE.WheaErrTypePCIXBus = 4
WHEA_ERROR_TYPE.WheaErrTypePCIXDevice = 5
WHEA_ERROR_TYPE.WheaErrTypeGeneric = 6
PROFILE_DEPARTURE_STYLE = v_enum()
PROFILE_DEPARTURE_STYLE.PDS_UPDATE_DEFAULT = 1
PROFILE_DEPARTURE_STYLE.PDS_UPDATE_ON_REMOVE = 2
PROFILE_DEPARTURE_STYLE.PDS_UPDATE_ON_INTERFACE = 3
PROFILE_DEPARTURE_STYLE.PDS_UPDATE_ON_EJECT = 4
OB_OPEN_REASON = v_enum()
OB_OPEN_REASON.ObCreateHandle = 0
OB_OPEN_REASON.ObOpenHandle = 1
OB_OPEN_REASON.ObDuplicateHandle = 2
OB_OPEN_REASON.ObInheritHandle = 3
OB_OPEN_REASON.ObMaxOpenReason = 4
CPU_VENDORS = v_enum()
CPU_VENDORS.CPU_NONE = 0
CPU_VENDORS.CPU_INTEL = 1
CPU_VENDORS.CPU_AMD = 2
CPU_VENDORS.CPU_CYRIX = 3
CPU_VENDORS.CPU_TRANSMETA = 4
CPU_VENDORS.CPU_VIA = 5
CPU_VENDORS.CPU_CENTAUR = 5
CPU_VENDORS.CPU_RISE = 6
CPU_VENDORS.CPU_UNKNOWN = 7
POWER_STATE_TYPE = v_enum()
POWER_STATE_TYPE.SystemPowerState = 0
POWER_STATE_TYPE.DevicePowerState = 1
TYPE_OF_MEMORY = v_enum()
TYPE_OF_MEMORY.LoaderExceptionBlock = 0
TYPE_OF_MEMORY.LoaderSystemBlock = 1
TYPE_OF_MEMORY.LoaderFree = 2
TYPE_OF_MEMORY.LoaderBad = 3
TYPE_OF_MEMORY.LoaderLoadedProgram = 4
TYPE_OF_MEMORY.LoaderFirmwareTemporary = 5
TYPE_OF_MEMORY.LoaderFirmwarePermanent = 6
TYPE_OF_MEMORY.LoaderOsloaderHeap = 7
TYPE_OF_MEMORY.LoaderOsloaderStack = 8
TYPE_OF_MEMORY.LoaderSystemCode = 9
TYPE_OF_MEMORY.LoaderHalCode = 10
TYPE_OF_MEMORY.LoaderBootDriver = 11
TYPE_OF_MEMORY.LoaderConsoleInDriver = 12
TYPE_OF_MEMORY.LoaderConsoleOutDriver = 13
TYPE_OF_MEMORY.LoaderStartupDpcStack = 14
TYPE_OF_MEMORY.LoaderStartupKernelStack = 15
TYPE_OF_MEMORY.LoaderStartupPanicStack = 16
TYPE_OF_MEMORY.LoaderStartupPcrPage = 17
TYPE_OF_MEMORY.LoaderStartupPdrPage = 18
TYPE_OF_MEMORY.LoaderRegistryData = 19
TYPE_OF_MEMORY.LoaderMemoryData = 20
TYPE_OF_MEMORY.LoaderNlsData = 21
TYPE_OF_MEMORY.LoaderSpecialMemory = 22
TYPE_OF_MEMORY.LoaderBBTMemory = 23
TYPE_OF_MEMORY.LoaderReserve = 24
TYPE_OF_MEMORY.LoaderXIPRom = 25
TYPE_OF_MEMORY.LoaderHALCachedMemory = 26
TYPE_OF_MEMORY.LoaderLargePageFiller = 27
TYPE_OF_MEMORY.LoaderErrorLogMemory = 28
TYPE_OF_MEMORY.LoaderMaximum = 29
ETW_NOTIFICATION_TYPE = v_enum()
ETW_NOTIFICATION_TYPE.EtwNotificationTypeNoReply = 1
ETW_NOTIFICATION_TYPE.EtwNotificationTypeLegacyEnable = 2
ETW_NOTIFICATION_TYPE.EtwNotificationTypeEnable = 3
ETW_NOTIFICATION_TYPE.EtwNotificationTypePrivateLogger = 4
ETW_NOTIFICATION_TYPE.EtwNotificationTypePerflib = 5
ETW_NOTIFICATION_TYPE.EtwNotificationTypeAudio = 6
ETW_NOTIFICATION_TYPE.EtwNotificationTypeSession = 7
ETW_NOTIFICATION_TYPE.EtwNotificationTypeReserved = 8
ETW_NOTIFICATION_TYPE.EtwNotificationTypeCredentialUI = 9
ETW_NOTIFICATION_TYPE.EtwNotificationTypeMax = 10
KTM_STATE = v_enum()
KTM_STATE.KKtmUninitialized = 0
KTM_STATE.KKtmInitialized = 1
KTM_STATE.KKtmRecovering = 2
KTM_STATE.KKtmOnline = 3
KTM_STATE.KKtmRecoveryFailed = 4
KTM_STATE.KKtmOffline = 5
PP_NPAGED_LOOKASIDE_NUMBER = v_enum()
PP_NPAGED_LOOKASIDE_NUMBER.LookasideSmallIrpList = 0
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMediumIrpList = 1
PP_NPAGED_LOOKASIDE_NUMBER.LookasideLargeIrpList = 2
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMdlList = 3
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCreateInfoList = 4
PP_NPAGED_LOOKASIDE_NUMBER.LookasideNameBufferList = 5
PP_NPAGED_LOOKASIDE_NUMBER.LookasideTwilightList = 6
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCompletionList = 7
PP_NPAGED_LOOKASIDE_NUMBER.LookasideScratchBufferList = 8
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMaximumList = 9
PPM_IDLE_BUCKET_TIME_TYPE = v_enum()
PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeInQpc = 0
PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeIn100ns = 1
PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeMaximum = 2
PLUGPLAY_EVENT_CATEGORY = v_enum()
PLUGPLAY_EVENT_CATEGORY.HardwareProfileChangeEvent = 0
PLUGPLAY_EVENT_CATEGORY.TargetDeviceChangeEvent = 1
PLUGPLAY_EVENT_CATEGORY.DeviceClassChangeEvent = 2
PLUGPLAY_EVENT_CATEGORY.CustomDeviceEvent = 3
PLUGPLAY_EVENT_CATEGORY.DeviceInstallEvent = 4
PLUGPLAY_EVENT_CATEGORY.DeviceArrivalEvent = 5
PLUGPLAY_EVENT_CATEGORY.VetoEvent = 6
PLUGPLAY_EVENT_CATEGORY.BlockedDriverEvent = 7
PLUGPLAY_EVENT_CATEGORY.InvalidIDEvent = 8
PLUGPLAY_EVENT_CATEGORY.DevicePropertyChangeEvent = 9
PLUGPLAY_EVENT_CATEGORY.DeviceInstanceRemovalEvent = 10
PLUGPLAY_EVENT_CATEGORY.DeviceInstanceStartedEvent = 11
PLUGPLAY_EVENT_CATEGORY.MaxPlugEventCategory = 12
IO_SESSION_STATE = v_enum()
IO_SESSION_STATE.IoSessionStateCreated = 1
IO_SESSION_STATE.IoSessionStateInitialized = 2
IO_SESSION_STATE.IoSessionStateConnected = 3
IO_SESSION_STATE.IoSessionStateDisconnected = 4
IO_SESSION_STATE.IoSessionStateDisconnectedLoggedOn = 5
IO_SESSION_STATE.IoSessionStateLoggedOn = 6
IO_SESSION_STATE.IoSessionStateLoggedOff = 7
IO_SESSION_STATE.IoSessionStateTerminated = 8
IO_SESSION_STATE.IoSessionStateMax = 9
PF_FILE_ACCESS_TYPE = v_enum()
PF_FILE_ACCESS_TYPE.PfFileAccessTypeRead = 0
PF_FILE_ACCESS_TYPE.PfFileAccessTypeWrite = 1
PF_FILE_ACCESS_TYPE.PfFileAccessTypeMax = 2
ARBITER_RESULT = v_enum()
ARBITER_RESULT.ArbiterResultUndefined = -1
ARBITER_RESULT.ArbiterResultSuccess = 0
ARBITER_RESULT.ArbiterResultExternalConflict = 1
ARBITER_RESULT.ArbiterResultNullRequest = 2
POWER_REQUEST_TYPE = v_enum()
POWER_REQUEST_TYPE.PowerRequestDisplayRequired = 0
POWER_REQUEST_TYPE.PowerRequestSystemRequired = 1
POWER_REQUEST_TYPE.PowerRequestAwayModeRequired = 2
POWER_REQUEST_TYPE.PowerRequestExecutionRequired = 3
POWER_REQUEST_TYPE_INTERNAL = v_enum()
POWER_REQUEST_TYPE_INTERNAL.PowerRequestDisplayRequiredInternal = 0
POWER_REQUEST_TYPE_INTERNAL.PowerRequestSystemRequiredInternal = 1
POWER_REQUEST_TYPE_INTERNAL.PowerRequestAwayModeRequiredInternal = 2
POWER_REQUEST_TYPE_INTERNAL.PowerRequestExecutionRequiredInternal = 3
POWER_REQUEST_TYPE_INTERNAL.PowerRequestPerfBoostRequiredInternal = 4
POWER_REQUEST_TYPE_INTERNAL.PowerRequestAudioAnyInternal = 5
POWER_REQUEST_TYPE_INTERNAL.PowerRequestAudioOffloadInternal = 6
POWER_REQUEST_TYPE_INTERNAL.PowerRequestVideoBatchingInternal = 7
POWER_REQUEST_TYPE_INTERNAL.PowerRequestFullScreenVideoInternal = 8
POWER_REQUEST_TYPE_INTERNAL.PowerRequestInternalInvalid = 9
POWER_ACTION = v_enum()
POWER_ACTION.PowerActionNone = 0
POWER_ACTION.PowerActionReserved = 1
POWER_ACTION.PowerActionSleep = 2
POWER_ACTION.PowerActionHibernate = 3
POWER_ACTION.PowerActionShutdown = 4
POWER_ACTION.PowerActionShutdownReset = 5
POWER_ACTION.PowerActionShutdownOff = 6
POWER_ACTION.PowerActionWarmEject = 7
ARBITER_REQUEST_SOURCE = v_enum()
ARBITER_REQUEST_SOURCE.ArbiterRequestUndefined = -1
ARBITER_REQUEST_SOURCE.ArbiterRequestLegacyReported = 0
ARBITER_REQUEST_SOURCE.ArbiterRequestHalReported = 1
ARBITER_REQUEST_SOURCE.ArbiterRequestLegacyAssigned = 2
ARBITER_REQUEST_SOURCE.ArbiterRequestPnpDetected = 3
ARBITER_REQUEST_SOURCE.ArbiterRequestPnpEnumerated = 4
KOBJECTS = v_enum()
KOBJECTS.EventNotificationObject = 0
KOBJECTS.EventSynchronizationObject = 1
KOBJECTS.MutantObject = 2
KOBJECTS.ProcessObject = 3
KOBJECTS.QueueObject = 4
KOBJECTS.SemaphoreObject = 5
KOBJECTS.ThreadObject = 6
KOBJECTS.GateObject = 7
KOBJECTS.TimerNotificationObject = 8
KOBJECTS.TimerSynchronizationObject = 9
KOBJECTS.Spare2Object = 10
KOBJECTS.Spare3Object = 11
KOBJECTS.Spare4Object = 12
KOBJECTS.Spare5Object = 13
KOBJECTS.Spare6Object = 14
KOBJECTS.Spare7Object = 15
KOBJECTS.Spare8Object = 16
KOBJECTS.ProfileCallbackObject = 17
KOBJECTS.ApcObject = 18
KOBJECTS.DpcObject = 19
KOBJECTS.DeviceQueueObject = 20
KOBJECTS.EventPairObject = 21
KOBJECTS.InterruptObject = 22
KOBJECTS.ProfileObject = 23
KOBJECTS.ThreadedDpcObject = 24
KOBJECTS.MaximumKernelObject = 25
CM_LOAD_FAILURE_TYPE = v_enum()
CM_LOAD_FAILURE_TYPE._None = 0
CM_LOAD_FAILURE_TYPE.CmInitializeHive = 1
CM_LOAD_FAILURE_TYPE.HvInitializeHive = 2
CM_LOAD_FAILURE_TYPE.HvpBuildMap = 3
CM_LOAD_FAILURE_TYPE.HvpBuildMapAndCopy = 4
CM_LOAD_FAILURE_TYPE.HvpInitMap = 5
CM_LOAD_FAILURE_TYPE.HvLoadHive = 6
CM_LOAD_FAILURE_TYPE.HvpReadFileImageAndBuildMap = 7
CM_LOAD_FAILURE_TYPE.HvpRecoverData = 8
CM_LOAD_FAILURE_TYPE.CmpValidateHiveSecurityDescriptors = 9
CM_LOAD_FAILURE_TYPE.HvpEnlistBinInMap = 10
CM_LOAD_FAILURE_TYPE.CmCheckRegistry = 11
CM_LOAD_FAILURE_TYPE.CmRegistryIO = 12
CM_LOAD_FAILURE_TYPE.CmCheckRegistry2 = 13
CM_LOAD_FAILURE_TYPE.CmpCheckKey = 14
CM_LOAD_FAILURE_TYPE.CmpCheckValueList = 15
CM_LOAD_FAILURE_TYPE.HvCheckHive = 16
CM_LOAD_FAILURE_TYPE.HvCheckBin = 17
ETW_BUFFER_STATE = v_enum()
ETW_BUFFER_STATE.EtwBufferStateFree = 0
ETW_BUFFER_STATE.EtwBufferStateGeneralLogging = 1
ETW_BUFFER_STATE.EtwBufferStateCSwitch = 2
ETW_BUFFER_STATE.EtwBufferStateFlush = 3
ETW_BUFFER_STATE.EtwBufferStateMaximum = 4
USER_ACTIVITY_PRESENCE = v_enum()
USER_ACTIVITY_PRESENCE.PowerUserPresent = 0
USER_ACTIVITY_PRESENCE.PowerUserNotPresent = 1
USER_ACTIVITY_PRESENCE.PowerUserInactive = 2
USER_ACTIVITY_PRESENCE.PowerUserMaximum = 3
USER_ACTIVITY_PRESENCE.PowerUserInvalid = 3
POWER_POLICY_DEVICE_TYPE = v_enum()
POWER_POLICY_DEVICE_TYPE.PolicyDeviceSystemButton = 0
POWER_POLICY_DEVICE_TYPE.PolicyDeviceThermalZone = 1
POWER_POLICY_DEVICE_TYPE.PolicyDeviceBattery = 2
POWER_POLICY_DEVICE_TYPE.PolicyDeviceMemory = 3
POWER_POLICY_DEVICE_TYPE.PolicyInitiatePowerActionAPI = 4
POWER_POLICY_DEVICE_TYPE.PolicySetPowerStateAPI = 5
POWER_POLICY_DEVICE_TYPE.PolicyImmediateDozeS4 = 6
POWER_POLICY_DEVICE_TYPE.PolicySystemIdle = 7
POWER_POLICY_DEVICE_TYPE.PolicyDeviceWakeAlarm = 8
POWER_POLICY_DEVICE_TYPE.PolicyDeviceMax = 9
UoWActionType = v_enum()
UoWActionType.UoWAddThisKey = 0
UoWActionType.UoWAddChildKey = 1
UoWActionType.UoWDeleteThisKey = 2
UoWActionType.UoWDeleteChildKey = 3
UoWActionType.UoWSetValueNew = 4
UoWActionType.UoWSetValueExisting = 5
UoWActionType.UoWDeleteValue = 6
UoWActionType.UoWSetKeyUserFlags = 7
UoWActionType.UoWSetLastWriteTime = 8
UoWActionType.UoWSetSecurityDescriptor = 9
UoWActionType.UoWRenameSubKey = 10
UoWActionType.UoWRenameOldSubKey = 11
UoWActionType.UoWRenameNewSubKey = 12
UoWActionType.UoWIsolation = 13
UoWActionType.UoWInvalid = 14
PERFINFO_MM_STAT = v_enum()
PERFINFO_MM_STAT.PerfInfoMMStatNotUsed = 0
PERFINFO_MM_STAT.PerfInfoMMStatAggregatePageCombine = 1
PERFINFO_MM_STAT.PerfInfoMMStatIterationPageCombine = 2
PERFINFO_MM_STAT.PerfInfoMMStatMax = 3
WHEA_ERROR_PACKET_DATA_FORMAT = v_enum()
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatIPFSalRecord = 0
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatXPFMCA = 1
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMemory = 2
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIExpress = 3
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatNMIPort = 4
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXBus = 5
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXDevice = 6
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatGeneric = 7
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMax = 8
DPFLTR_TYPE = v_enum()
DPFLTR_TYPE.DPFLTR_SYSTEM_ID = 0
DPFLTR_TYPE.DPFLTR_SMSS_ID = 1
DPFLTR_TYPE.DPFLTR_SETUP_ID = 2
DPFLTR_TYPE.DPFLTR_NTFS_ID = 3
DPFLTR_TYPE.DPFLTR_FSTUB_ID = 4
DPFLTR_TYPE.DPFLTR_CRASHDUMP_ID = 5
DPFLTR_TYPE.DPFLTR_CDAUDIO_ID = 6
DPFLTR_TYPE.DPFLTR_CDROM_ID = 7
DPFLTR_TYPE.DPFLTR_CLASSPNP_ID = 8
DPFLTR_TYPE.DPFLTR_DISK_ID = 9
DPFLTR_TYPE.DPFLTR_REDBOOK_ID = 10
DPFLTR_TYPE.DPFLTR_STORPROP_ID = 11
DPFLTR_TYPE.DPFLTR_SCSIPORT_ID = 12
DPFLTR_TYPE.DPFLTR_SCSIMINIPORT_ID = 13
DPFLTR_TYPE.DPFLTR_CONFIG_ID = 14
DPFLTR_TYPE.DPFLTR_I8042PRT_ID = 15
DPFLTR_TYPE.DPFLTR_SERMOUSE_ID = 16
DPFLTR_TYPE.DPFLTR_LSERMOUS_ID = 17
DPFLTR_TYPE.DPFLTR_KBDHID_ID = 18
DPFLTR_TYPE.DPFLTR_MOUHID_ID = 19
DPFLTR_TYPE.DPFLTR_KBDCLASS_ID = 20
DPFLTR_TYPE.DPFLTR_MOUCLASS_ID = 21
DPFLTR_TYPE.DPFLTR_TWOTRACK_ID = 22
DPFLTR_TYPE.DPFLTR_WMILIB_ID = 23
DPFLTR_TYPE.DPFLTR_ACPI_ID = 24
DPFLTR_TYPE.DPFLTR_AMLI_ID = 25
DPFLTR_TYPE.DPFLTR_HALIA64_ID = 26
DPFLTR_TYPE.DPFLTR_VIDEO_ID = 27
DPFLTR_TYPE.DPFLTR_SVCHOST_ID = 28
DPFLTR_TYPE.DPFLTR_VIDEOPRT_ID = 29
DPFLTR_TYPE.DPFLTR_TCPIP_ID = 30
DPFLTR_TYPE.DPFLTR_DMSYNTH_ID = 31
DPFLTR_TYPE.DPFLTR_NTOSPNP_ID = 32
DPFLTR_TYPE.DPFLTR_FASTFAT_ID = 33
DPFLTR_TYPE.DPFLTR_SAMSS_ID = 34
DPFLTR_TYPE.DPFLTR_PNPMGR_ID = 35
DPFLTR_TYPE.DPFLTR_NETAPI_ID = 36
DPFLTR_TYPE.DPFLTR_SCSERVER_ID = 37
DPFLTR_TYPE.DPFLTR_SCCLIENT_ID = 38
DPFLTR_TYPE.DPFLTR_SERIAL_ID = 39
DPFLTR_TYPE.DPFLTR_SERENUM_ID = 40
DPFLTR_TYPE.DPFLTR_UHCD_ID = 41
DPFLTR_TYPE.DPFLTR_RPCPROXY_ID = 42
DPFLTR_TYPE.DPFLTR_AUTOCHK_ID = 43
DPFLTR_TYPE.DPFLTR_DCOMSS_ID = 44
DPFLTR_TYPE.DPFLTR_UNIMODEM_ID = 45
DPFLTR_TYPE.DPFLTR_SIS_ID = 46
DPFLTR_TYPE.DPFLTR_FLTMGR_ID = 47
DPFLTR_TYPE.DPFLTR_WMICORE_ID = 48
DPFLTR_TYPE.DPFLTR_BURNENG_ID = 49
DPFLTR_TYPE.DPFLTR_IMAPI_ID = 50
DPFLTR_TYPE.DPFLTR_SXS_ID = 51
DPFLTR_TYPE.DPFLTR_FUSION_ID = 52
DPFLTR_TYPE.DPFLTR_IDLETASK_ID = 53
DPFLTR_TYPE.DPFLTR_SOFTPCI_ID = 54
DPFLTR_TYPE.DPFLTR_TAPE_ID = 55
DPFLTR_TYPE.DPFLTR_MCHGR_ID = 56
DPFLTR_TYPE.DPFLTR_IDEP_ID = 57
DPFLTR_TYPE.DPFLTR_PCIIDE_ID = 58
DPFLTR_TYPE.DPFLTR_FLOPPY_ID = 59
DPFLTR_TYPE.DPFLTR_FDC_ID = 60
DPFLTR_TYPE.DPFLTR_TERMSRV_ID = 61
DPFLTR_TYPE.DPFLTR_W32TIME_ID = 62
DPFLTR_TYPE.DPFLTR_PREFETCHER_ID = 63
DPFLTR_TYPE.DPFLTR_RSFILTER_ID = 64
DPFLTR_TYPE.DPFLTR_FCPORT_ID = 65
DPFLTR_TYPE.DPFLTR_PCI_ID = 66
DPFLTR_TYPE.DPFLTR_DMIO_ID = 67
DPFLTR_TYPE.DPFLTR_DMCONFIG_ID = 68
DPFLTR_TYPE.DPFLTR_DMADMIN_ID = 69
DPFLTR_TYPE.DPFLTR_WSOCKTRANSPORT_ID = 70
DPFLTR_TYPE.DPFLTR_VSS_ID = 71
DPFLTR_TYPE.DPFLTR_PNPMEM_ID = 72
DPFLTR_TYPE.DPFLTR_PROCESSOR_ID = 73
DPFLTR_TYPE.DPFLTR_DMSERVER_ID = 74
DPFLTR_TYPE.DPFLTR_SR_ID = 75
DPFLTR_TYPE.DPFLTR_INFINIBAND_ID = 76
DPFLTR_TYPE.DPFLTR_IHVDRIVER_ID = 77
DPFLTR_TYPE.DPFLTR_IHVVIDEO_ID = 78
DPFLTR_TYPE.DPFLTR_IHVAUDIO_ID = 79
DPFLTR_TYPE.DPFLTR_IHVNETWORK_ID = 80
DPFLTR_TYPE.DPFLTR_IHVSTREAMING_ID = 81
DPFLTR_TYPE.DPFLTR_IHVBUS_ID = 82
DPFLTR_TYPE.DPFLTR_HPS_ID = 83
DPFLTR_TYPE.DPFLTR_RTLTHREADPOOL_ID = 84
DPFLTR_TYPE.DPFLTR_LDR_ID = 85
DPFLTR_TYPE.DPFLTR_TCPIP6_ID = 86
DPFLTR_TYPE.DPFLTR_ISAPNP_ID = 87
DPFLTR_TYPE.DPFLTR_SHPC_ID = 88
DPFLTR_TYPE.DPFLTR_STORPORT_ID = 89
DPFLTR_TYPE.DPFLTR_STORMINIPORT_ID = 90
DPFLTR_TYPE.DPFLTR_PRINTSPOOLER_ID = 91
DPFLTR_TYPE.DPFLTR_VSSDYNDISK_ID = 92
DPFLTR_TYPE.DPFLTR_VERIFIER_ID = 93
DPFLTR_TYPE.DPFLTR_VDS_ID = 94
DPFLTR_TYPE.DPFLTR_VDSBAS_ID = 95
DPFLTR_TYPE.DPFLTR_VDSDYN_ID = 96
DPFLTR_TYPE.DPFLTR_VDSDYNDR_ID = 97
DPFLTR_TYPE.DPFLTR_VDSLDR_ID = 98
DPFLTR_TYPE.DPFLTR_VDSUTIL_ID = 99
DPFLTR_TYPE.DPFLTR_DFRGIFC_ID = 100
DPFLTR_TYPE.DPFLTR_DEFAULT_ID = 101
DPFLTR_TYPE.DPFLTR_MM_ID = 102
DPFLTR_TYPE.DPFLTR_DFSC_ID = 103
DPFLTR_TYPE.DPFLTR_WOW64_ID = 104
DPFLTR_TYPE.DPFLTR_ALPC_ID = 105
DPFLTR_TYPE.DPFLTR_WDI_ID = 106
DPFLTR_TYPE.DPFLTR_PERFLIB_ID = 107
DPFLTR_TYPE.DPFLTR_KTM_ID = 108
DPFLTR_TYPE.DPFLTR_IOSTRESS_ID = 109
DPFLTR_TYPE.DPFLTR_HEAP_ID = 110
DPFLTR_TYPE.DPFLTR_WHEA_ID = 111
DPFLTR_TYPE.DPFLTR_USERGDI_ID = 112
DPFLTR_TYPE.DPFLTR_MMCSS_ID = 113
DPFLTR_TYPE.DPFLTR_TPM_ID = 114
DPFLTR_TYPE.DPFLTR_THREADORDER_ID = 115
DPFLTR_TYPE.DPFLTR_ENVIRON_ID = 116
DPFLTR_TYPE.DPFLTR_EMS_ID = 117
DPFLTR_TYPE.DPFLTR_WDT_ID = 118
DPFLTR_TYPE.DPFLTR_FVEVOL_ID = 119
DPFLTR_TYPE.DPFLTR_NDIS_ID = 120
DPFLTR_TYPE.DPFLTR_NVCTRACE_ID = 121
DPFLTR_TYPE.DPFLTR_LUAFV_ID = 122
DPFLTR_TYPE.DPFLTR_APPCOMPAT_ID = 123
DPFLTR_TYPE.DPFLTR_USBSTOR_ID = 124
DPFLTR_TYPE.DPFLTR_SBP2PORT_ID = 125
DPFLTR_TYPE.DPFLTR_COVERAGE_ID = 126
DPFLTR_TYPE.DPFLTR_CACHEMGR_ID = 127
DPFLTR_TYPE.DPFLTR_MOUNTMGR_ID = 128
DPFLTR_TYPE.DPFLTR_CFR_ID = 129
DPFLTR_TYPE.DPFLTR_TXF_ID = 130
DPFLTR_TYPE.DPFLTR_KSECDD_ID = 131
DPFLTR_TYPE.DPFLTR_FLTREGRESS_ID = 132
DPFLTR_TYPE.DPFLTR_MPIO_ID = 133
DPFLTR_TYPE.DPFLTR_MSDSM_ID = 134
DPFLTR_TYPE.DPFLTR_UDFS_ID = 135
DPFLTR_TYPE.DPFLTR_PSHED_ID = 136
DPFLTR_TYPE.DPFLTR_STORVSP_ID = 137
DPFLTR_TYPE.DPFLTR_LSASS_ID = 138
DPFLTR_TYPE.DPFLTR_SSPICLI_ID = 139
DPFLTR_TYPE.DPFLTR_CNG_ID = 140
DPFLTR_TYPE.DPFLTR_EXFAT_ID = 141
DPFLTR_TYPE.DPFLTR_FILETRACE_ID = 142
DPFLTR_TYPE.DPFLTR_XSAVE_ID = 143
DPFLTR_TYPE.DPFLTR_SE_ID = 144
DPFLTR_TYPE.DPFLTR_DRIVEEXTENDER_ID = 145
DPFLTR_TYPE.DPFLTR_POWER_ID = 146
DPFLTR_TYPE.DPFLTR_CRASHDUMPXHCI_ID = 147
DPFLTR_TYPE.DPFLTR_GPIO_ID = 148
DPFLTR_TYPE.DPFLTR_REFS_ID = 149
DPFLTR_TYPE.DPFLTR_ENDOFTABLE_ID = 150
DMA_SPEED = v_enum()
DMA_SPEED.Compatible = 0
DMA_SPEED.TypeA = 1
DMA_SPEED.TypeB = 2
DMA_SPEED.TypeC = 3
DMA_SPEED.TypeF = 4
DMA_SPEED.MaximumDmaSpeed = 5
IO_PRIORITY_HINT = v_enum()
IO_PRIORITY_HINT.IoPriorityVeryLow = 0
IO_PRIORITY_HINT.IoPriorityLow = 1
IO_PRIORITY_HINT.IoPriorityNormal = 2
IO_PRIORITY_HINT.IoPriorityHigh = 3
IO_PRIORITY_HINT.IoPriorityCritical = 4
IO_PRIORITY_HINT.MaxIoPriorityTypes = 5
SYSTEM_POWER_CONDITION = v_enum()
SYSTEM_POWER_CONDITION.PoAc = 0
SYSTEM_POWER_CONDITION.PoDc = 1
SYSTEM_POWER_CONDITION.PoHot = 2
SYSTEM_POWER_CONDITION.PoConditionMaximum = 3
KTRANSACTION_OUTCOME = v_enum()
KTRANSACTION_OUTCOME.KTxOutcomeUninitialized = 0
KTRANSACTION_OUTCOME.KTxOutcomeUndetermined = 1
KTRANSACTION_OUTCOME.KTxOutcomeCommitted = 2
KTRANSACTION_OUTCOME.KTxOutcomeAborted = 3
KTRANSACTION_OUTCOME.KTxOutcomeUnavailable = 4
KENLISTMENT_STATE = v_enum()
KENLISTMENT_STATE.KEnlistmentUninitialized = 0
KENLISTMENT_STATE.KEnlistmentActive = 256
KENLISTMENT_STATE.KEnlistmentPreparing = 257
KENLISTMENT_STATE.KEnlistmentPrepared = 258
KENLISTMENT_STATE.KEnlistmentInDoubt = 259
KENLISTMENT_STATE.KEnlistmentCommitted = 260
KENLISTMENT_STATE.KEnlistmentCommittedNotify = 261
KENLISTMENT_STATE.KEnlistmentCommitRequested = 262
KENLISTMENT_STATE.KEnlistmentAborted = 263
KENLISTMENT_STATE.KEnlistmentDelegated = 264
KENLISTMENT_STATE.KEnlistmentDelegatedDisconnected = 265
KENLISTMENT_STATE.KEnlistmentPrePreparing = 266
KENLISTMENT_STATE.KEnlistmentForgotten = 267
KENLISTMENT_STATE.KEnlistmentRecovering = 268
KENLISTMENT_STATE.KEnlistmentAborting = 269
KENLISTMENT_STATE.KEnlistmentReadOnly = 270
KENLISTMENT_STATE.KEnlistmentOutcomeUnavailable = 271
KENLISTMENT_STATE.KEnlistmentOffline = 272
KENLISTMENT_STATE.KEnlistmentPrePrepared = 273
KENLISTMENT_STATE.KEnlistmentInitialized = 274
SE_WS_APPX_SIGNATURE_ORIGIN = v_enum()
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_NOT_VALIDATED = 0
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_UNKNOWN = 1
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_APPSTORE = 2
SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_WINDOWS = 3
DMA_WIDTH = v_enum()
DMA_WIDTH.Width8Bits = 0
DMA_WIDTH.Width16Bits = 1
DMA_WIDTH.Width32Bits = 2
DMA_WIDTH.Width64Bits = 3
DMA_WIDTH.WidthNoWrap = 4
DMA_WIDTH.MaximumDmaWidth = 5
EX_POOL_PRIORITY = v_enum()
EX_POOL_PRIORITY.LowPoolPriority = 0
EX_POOL_PRIORITY.LowPoolPrioritySpecialPoolOverrun = 8
EX_POOL_PRIORITY.LowPoolPrioritySpecialPoolUnderrun = 9
EX_POOL_PRIORITY.NormalPoolPriority = 16
EX_POOL_PRIORITY.NormalPoolPrioritySpecialPoolOverrun = 24
EX_POOL_PRIORITY.NormalPoolPrioritySpecialPoolUnderrun = 25
EX_POOL_PRIORITY.HighPoolPriority = 32
EX_POOL_PRIORITY.HighPoolPrioritySpecialPoolOverrun = 40
EX_POOL_PRIORITY.HighPoolPrioritySpecialPoolUnderrun = 41
DUMP_EVENTS = v_enum()
DUMP_EVENTS.DUMP_EVENT_NONE = 0
DUMP_EVENTS.DUMP_EVENT_HIBER_RESUME = 1
DUMP_EVENTS.DUMP_EVENT_HIBER_RESUME_END = 2
KINTERRUPT_POLARITY = v_enum()
KINTERRUPT_POLARITY.InterruptPolarityUnknown = 0
KINTERRUPT_POLARITY.InterruptActiveHigh = 1
KINTERRUPT_POLARITY.InterruptRisingEdge = 1
KINTERRUPT_POLARITY.InterruptActiveLow = 2
KINTERRUPT_POLARITY.InterruptFallingEdge = 2
KINTERRUPT_POLARITY.InterruptActiveBoth = 3
PNP_VETO_TYPE = v_enum()
PNP_VETO_TYPE.PNP_VetoTypeUnknown = 0
PNP_VETO_TYPE.PNP_VetoLegacyDevice = 1
PNP_VETO_TYPE.PNP_VetoPendingClose = 2
PNP_VETO_TYPE.PNP_VetoWindowsApp = 3
PNP_VETO_TYPE.PNP_VetoWindowsService = 4
PNP_VETO_TYPE.PNP_VetoOutstandingOpen = 5
PNP_VETO_TYPE.PNP_VetoDevice = 6
PNP_VETO_TYPE.PNP_VetoDriver = 7
PNP_VETO_TYPE.PNP_VetoIllegalDeviceRequest = 8
PNP_VETO_TYPE.PNP_VetoInsufficientPower = 9
PNP_VETO_TYPE.PNP_VetoNonDisableable = 10
PNP_VETO_TYPE.PNP_VetoLegacyDriver = 11
PNP_VETO_TYPE.PNP_VetoInsufficientRights = 12
LDR_DLL_LOAD_REASON = v_enum()
LDR_DLL_LOAD_REASON.LoadReasonStaticDependency = 0
LDR_DLL_LOAD_REASON.LoadReasonStaticForwarderDependency = 1
LDR_DLL_LOAD_REASON.LoadReasonDynamicForwarderDependency = 2
LDR_DLL_LOAD_REASON.LoadReasonDelayloadDependency = 3
LDR_DLL_LOAD_REASON.LoadReasonDynamicLoad = 4
LDR_DLL_LOAD_REASON.LoadReasonAsImageLoad = 5
LDR_DLL_LOAD_REASON.LoadReasonAsDataLoad = 6
LDR_DLL_LOAD_REASON.LoadReasonUnknown = -1
KTHREAD_STATE = v_enum()
KTHREAD_STATE.Initialized = 0
KTHREAD_STATE.Ready = 1
KTHREAD_STATE.Running = 2
KTHREAD_STATE.Standby = 3
KTHREAD_STATE.Terminated = 4
KTHREAD_STATE.Waiting = 5
KTHREAD_STATE.Transition = 6
KTHREAD_STATE.DeferredReady = 7
KTHREAD_STATE.GateWaitObsolete = 8
DEVPROP_OPERATOR = v_enum()
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MODIFIER_NOT = 65536
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MODIFIER_IGNORE_CASE = 131072
DEVPROP_OPERATOR.DEVPROP_OPERATOR_NONE = 0
DEVPROP_OPERATOR.DEVPROP_OPERATOR_EXISTS = 1
DEVPROP_OPERATOR.DEVPROP_OPERATOR_EQUALS = 2
DEVPROP_OPERATOR.DEVPROP_OPERATOR_NOT_EQUALS = 65538
DEVPROP_OPERATOR.DEVPROP_OPERATOR_GREATER_THAN = 3
DEVPROP_OPERATOR.DEVPROP_OPERATOR_LESS_THAN = 4
DEVPROP_OPERATOR.DEVPROP_OPERATOR_GREATER_THAN_EQUALS = 5
DEVPROP_OPERATOR.DEVPROP_OPERATOR_LESS_THAN_EQUALS = 6
DEVPROP_OPERATOR.DEVPROP_OPERATOR_EQUALS_IGNORE_CASE = 131074
DEVPROP_OPERATOR.DEVPROP_OPERATOR_NOT_EQUALS_IGNORE_CASE = 196610
DEVPROP_OPERATOR.DEVPROP_OPERATOR_BITWISE_AND = 7
DEVPROP_OPERATOR.DEVPROP_OPERATOR_BITWISE_OR = 8
DEVPROP_OPERATOR.DEVPROP_OPERATOR_LIST_CONTAINS = 4096
DEVPROP_OPERATOR.DEVPROP_OPERATOR_LIST_CONTAINS_IGNORE_CASE = 135168
DEVPROP_OPERATOR.DEVPROP_OPERATOR_AND_OPEN = 1048576
DEVPROP_OPERATOR.DEVPROP_OPERATOR_AND_CLOSE = 2097152
DEVPROP_OPERATOR.DEVPROP_OPERATOR_OR_OPEN = 3145728
DEVPROP_OPERATOR.DEVPROP_OPERATOR_OR_CLOSE = 4194304
DEVPROP_OPERATOR.DEVPROP_OPERATOR_NOT_OPEN = 5242880
DEVPROP_OPERATOR.DEVPROP_OPERATOR_NOT_CLOSE = 6291456
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MASK_EVAL = 4095
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MASK_LIST = 61440
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MASK_MODIFIER = 983040
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MASK_NOT_LOGICAL = 1048575
DEVPROP_OPERATOR.DEVPROP_OPERATOR_MASK_LOGICAL = -1048576
SECURITY_IMPERSONATION_LEVEL = v_enum()
SECURITY_IMPERSONATION_LEVEL.SecurityAnonymous = 0
SECURITY_IMPERSONATION_LEVEL.SecurityIdentification = 1
SECURITY_IMPERSONATION_LEVEL.SecurityImpersonation = 2
SECURITY_IMPERSONATION_LEVEL.SecurityDelegation = 3
TOKEN_INFORMATION_CLASS = v_enum()
TOKEN_INFORMATION_CLASS.TokenUser = 1
TOKEN_INFORMATION_CLASS.TokenGroups = 2
TOKEN_INFORMATION_CLASS.TokenPrivileges = 3
TOKEN_INFORMATION_CLASS.TokenOwner = 4
TOKEN_INFORMATION_CLASS.TokenPrimaryGroup = 5
TOKEN_INFORMATION_CLASS.TokenDefaultDacl = 6
TOKEN_INFORMATION_CLASS.TokenSource = 7
TOKEN_INFORMATION_CLASS.TokenType = 8
TOKEN_INFORMATION_CLASS.TokenImpersonationLevel = 9
TOKEN_INFORMATION_CLASS.TokenStatistics = 10
TOKEN_INFORMATION_CLASS.TokenRestrictedSids = 11
TOKEN_INFORMATION_CLASS.TokenSessionId = 12
TOKEN_INFORMATION_CLASS.TokenGroupsAndPrivileges = 13
TOKEN_INFORMATION_CLASS.TokenSessionReference = 14
TOKEN_INFORMATION_CLASS.TokenSandBoxInert = 15
TOKEN_INFORMATION_CLASS.TokenAuditPolicy = 16
TOKEN_INFORMATION_CLASS.TokenOrigin = 17
TOKEN_INFORMATION_CLASS.TokenElevationType = 18
TOKEN_INFORMATION_CLASS.TokenLinkedToken = 19
TOKEN_INFORMATION_CLASS.TokenElevation = 20
TOKEN_INFORMATION_CLASS.TokenHasRestrictions = 21
TOKEN_INFORMATION_CLASS.TokenAccessInformation = 22
TOKEN_INFORMATION_CLASS.TokenVirtualizationAllowed = 23
TOKEN_INFORMATION_CLASS.TokenVirtualizationEnabled = 24
TOKEN_INFORMATION_CLASS.TokenIntegrityLevel = 25
TOKEN_INFORMATION_CLASS.TokenUIAccess = 26
TOKEN_INFORMATION_CLASS.TokenMandatoryPolicy = 27
TOKEN_INFORMATION_CLASS.TokenLogonSid = 28
TOKEN_INFORMATION_CLASS.TokenIsAppContainer = 29
TOKEN_INFORMATION_CLASS.TokenCapabilities = 30
TOKEN_INFORMATION_CLASS.TokenAppContainerSid = 31
TOKEN_INFORMATION_CLASS.TokenAppContainerNumber = 32
TOKEN_INFORMATION_CLASS.TokenUserClaimAttributes = 33
TOKEN_INFORMATION_CLASS.TokenDeviceClaimAttributes = 34
TOKEN_INFORMATION_CLASS.TokenRestrictedUserClaimAttributes = 35
TOKEN_INFORMATION_CLASS.TokenRestrictedDeviceClaimAttributes = 36
TOKEN_INFORMATION_CLASS.TokenDeviceGroups = 37
TOKEN_INFORMATION_CLASS.TokenRestrictedDeviceGroups = 38
TOKEN_INFORMATION_CLASS.TokenSecurityAttributes = 39
TOKEN_INFORMATION_CLASS.TokenIsRestricted = 40
TOKEN_INFORMATION_CLASS.MaxTokenInfoClass = 41
KRESOURCEMANAGER_STATE = v_enum()
KRESOURCEMANAGER_STATE.KResourceManagerUninitialized = 0
KRESOURCEMANAGER_STATE.KResourceManagerOffline = 1
KRESOURCEMANAGER_STATE.KResourceManagerOnline = 2
ALTERNATIVE_ARCHITECTURE_TYPE = v_enum()
ALTERNATIVE_ARCHITECTURE_TYPE.StandardDesign = 0
ALTERNATIVE_ARCHITECTURE_TYPE.NEC98x86 = 1
ALTERNATIVE_ARCHITECTURE_TYPE.EndAlternatives = 2
PCW_CALLBACK_TYPE = v_enum()
PCW_CALLBACK_TYPE.PcwCallbackAddCounter = 0
PCW_CALLBACK_TYPE.PcwCallbackRemoveCounter = 1
PCW_CALLBACK_TYPE.PcwCallbackEnumerateInstances = 2
PCW_CALLBACK_TYPE.PcwCallbackCollectData = 3
REQUESTER_TYPE = v_enum()
REQUESTER_TYPE.KernelRequester = 0
REQUESTER_TYPE.UserProcessRequester = 1
REQUESTER_TYPE.UserSharedServiceRequester = 2
JOBOBJECTINFOCLASS = v_enum()
JOBOBJECTINFOCLASS.JobObjectBasicAccountingInformation = 1
JOBOBJECTINFOCLASS.JobObjectBasicLimitInformation = 2
JOBOBJECTINFOCLASS.JobObjectBasicProcessIdList = 3
JOBOBJECTINFOCLASS.JobObjectBasicUIRestrictions = 4
JOBOBJECTINFOCLASS.JobObjectSecurityLimitInformation = 5
JOBOBJECTINFOCLASS.JobObjectEndOfJobTimeInformation = 6
JOBOBJECTINFOCLASS.JobObjectAssociateCompletionPortInformation = 7
JOBOBJECTINFOCLASS.JobObjectBasicAndIoAccountingInformation = 8
JOBOBJECTINFOCLASS.JobObjectExtendedLimitInformation = 9
JOBOBJECTINFOCLASS.JobObjectJobSetInformation = 10
JOBOBJECTINFOCLASS.JobObjectGroupInformation = 11
JOBOBJECTINFOCLASS.JobObjectNotificationLimitInformation = 12
JOBOBJECTINFOCLASS.JobObjectLimitViolationInformation = 13
JOBOBJECTINFOCLASS.JobObjectGroupInformationEx = 14
JOBOBJECTINFOCLASS.JobObjectCpuRateControlInformation = 15
JOBOBJECTINFOCLASS.JobObjectCompletionFilter = 16
JOBOBJECTINFOCLASS.JobObjectCompletionCounter = 17
JOBOBJECTINFOCLASS.JobObjectFreezeInformation = 18
JOBOBJECTINFOCLASS.JobObjectExtendedAccountingInformation = 19
JOBOBJECTINFOCLASS.JobObjectWakeInformation = 20
JOBOBJECTINFOCLASS.JobObjectBackgroundInformation = 21
JOBOBJECTINFOCLASS.JobObjectSchedulingRankBiasInformation = 22
JOBOBJECTINFOCLASS.JobObjectTimerVirtualizationInformation = 23
JOBOBJECTINFOCLASS.JobObjectCycleTimeNotification = 24
JOBOBJECTINFOCLASS.JobObjectClearEvent = 25
JOBOBJECTINFOCLASS.JobObjectReserved1Information = 18
JOBOBJECTINFOCLASS.JobObjectReserved2Information = 19
JOBOBJECTINFOCLASS.JobObjectReserved3Information = 20
JOBOBJECTINFOCLASS.JobObjectReserved4Information = 21
JOBOBJECTINFOCLASS.JobObjectReserved5Information = 22
JOBOBJECTINFOCLASS.JobObjectReserved6Information = 23
JOBOBJECTINFOCLASS.JobObjectReserved7Information = 24
JOBOBJECTINFOCLASS.JobObjectReserved8Information = 25
JOBOBJECTINFOCLASS.MaxJobObjectInfoClass = 26
SYSTEM_POWER_STATE = v_enum()
SYSTEM_POWER_STATE.PowerSystemUnspecified = 0
SYSTEM_POWER_STATE.PowerSystemWorking = 1
SYSTEM_POWER_STATE.PowerSystemSleeping1 = 2
SYSTEM_POWER_STATE.PowerSystemSleeping2 = 3
SYSTEM_POWER_STATE.PowerSystemSleeping3 = 4
SYSTEM_POWER_STATE.PowerSystemHibernate = 5
SYSTEM_POWER_STATE.PowerSystemShutdown = 6
SYSTEM_POWER_STATE.PowerSystemMaximum = 7
MEMORY_CACHING_TYPE_ORIG = v_enum()
MEMORY_CACHING_TYPE_ORIG.MmFrameBufferCached = 2
PROFILE_STATUS = v_enum()
PROFILE_STATUS.DOCK_NOTDOCKDEVICE = 0
PROFILE_STATUS.DOCK_QUIESCENT = 1
PROFILE_STATUS.DOCK_ARRIVING = 2
PROFILE_STATUS.DOCK_DEPARTING = 3
PROFILE_STATUS.DOCK_EJECTIRP_COMPLETED = 4
MM_POOL_PRIORITIES = v_enum()
MM_POOL_PRIORITIES.MmHighPriority = 0
MM_POOL_PRIORITIES.MmNormalPriority = 1
MM_POOL_PRIORITIES.MmLowPriority = 2
MM_POOL_PRIORITIES.MmMaximumPoolPriority = 3
BLOB_ID = v_enum()
BLOB_ID.BLOB_TYPE_UNKNOWN = 0
BLOB_ID.BLOB_TYPE_CONNECTION_INFO = 1
BLOB_ID.BLOB_TYPE_MESSAGE = 2
BLOB_ID.BLOB_TYPE_SECURITY_CONTEXT = 3
BLOB_ID.BLOB_TYPE_SECTION = 4
BLOB_ID.BLOB_TYPE_REGION = 5
BLOB_ID.BLOB_TYPE_VIEW = 6
BLOB_ID.BLOB_TYPE_RESERVE = 7
BLOB_ID.BLOB_TYPE_DIRECT_TRANSFER = 8
BLOB_ID.BLOB_TYPE_HANDLE_DATA = 9
BLOB_ID.BLOB_TYPE_MAX_ID = 10
WHEA_ERROR_SOURCE_STATE = v_enum()
WHEA_ERROR_SOURCE_STATE.WheaErrSrcStateStopped = 1
WHEA_ERROR_SOURCE_STATE.WheaErrSrcStateStarted = 2
REG_NOTIFY_CLASS = v_enum()
REG_NOTIFY_CLASS.RegNtDeleteKey = 0
REG_NOTIFY_CLASS.RegNtPreDeleteKey = 0
REG_NOTIFY_CLASS.RegNtSetValueKey = 1
REG_NOTIFY_CLASS.RegNtPreSetValueKey = 1
REG_NOTIFY_CLASS.RegNtDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtPreDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtPreSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtRenameKey = 4
REG_NOTIFY_CLASS.RegNtPreRenameKey = 4
REG_NOTIFY_CLASS.RegNtEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtPreEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtPreEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtQueryKey = 7
REG_NOTIFY_CLASS.RegNtPreQueryKey = 7
REG_NOTIFY_CLASS.RegNtQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtPreQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreCreateKey = 10
REG_NOTIFY_CLASS.RegNtPostCreateKey = 11
REG_NOTIFY_CLASS.RegNtPreOpenKey = 12
REG_NOTIFY_CLASS.RegNtPostOpenKey = 13
REG_NOTIFY_CLASS.RegNtKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPreKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPostDeleteKey = 15
REG_NOTIFY_CLASS.RegNtPostSetValueKey = 16
REG_NOTIFY_CLASS.RegNtPostDeleteValueKey = 17
REG_NOTIFY_CLASS.RegNtPostSetInformationKey = 18
REG_NOTIFY_CLASS.RegNtPostRenameKey = 19
REG_NOTIFY_CLASS.RegNtPostEnumerateKey = 20
REG_NOTIFY_CLASS.RegNtPostEnumerateValueKey = 21
REG_NOTIFY_CLASS.RegNtPostQueryKey = 22
REG_NOTIFY_CLASS.RegNtPostQueryValueKey = 23
REG_NOTIFY_CLASS.RegNtPostQueryMultipleValueKey = 24
REG_NOTIFY_CLASS.RegNtPostKeyHandleClose = 25
REG_NOTIFY_CLASS.RegNtPreCreateKeyEx = 26
REG_NOTIFY_CLASS.RegNtPostCreateKeyEx = 27
REG_NOTIFY_CLASS.RegNtPreOpenKeyEx = 28
REG_NOTIFY_CLASS.RegNtPostOpenKeyEx = 29
REG_NOTIFY_CLASS.RegNtPreFlushKey = 30
REG_NOTIFY_CLASS.RegNtPostFlushKey = 31
REG_NOTIFY_CLASS.RegNtPreLoadKey = 32
REG_NOTIFY_CLASS.RegNtPostLoadKey = 33
REG_NOTIFY_CLASS.RegNtPreUnLoadKey = 34
REG_NOTIFY_CLASS.RegNtPostUnLoadKey = 35
REG_NOTIFY_CLASS.RegNtPreQueryKeySecurity = 36
REG_NOTIFY_CLASS.RegNtPostQueryKeySecurity = 37
REG_NOTIFY_CLASS.RegNtPreSetKeySecurity = 38
REG_NOTIFY_CLASS.RegNtPostSetKeySecurity = 39
REG_NOTIFY_CLASS.RegNtCallbackObjectContextCleanup = 40
REG_NOTIFY_CLASS.RegNtPreRestoreKey = 41
REG_NOTIFY_CLASS.RegNtPostRestoreKey = 42
REG_NOTIFY_CLASS.RegNtPreSaveKey = 43
REG_NOTIFY_CLASS.RegNtPostSaveKey = 44
REG_NOTIFY_CLASS.RegNtPreReplaceKey = 45
REG_NOTIFY_CLASS.RegNtPostReplaceKey = 46
REG_NOTIFY_CLASS.MaxRegNtNotifyClass = 47
MM_POOL_FAILURE_REASONS = v_enum()
MM_POOL_FAILURE_REASONS.MmNonPagedNoPtes = 0
MM_POOL_FAILURE_REASONS.MmPriorityTooLow = 1
MM_POOL_FAILURE_REASONS.MmNonPagedNoPagesAvailable = 2
MM_POOL_FAILURE_REASONS.MmPagedNoPtes = 3
MM_POOL_FAILURE_REASONS.MmSessionPagedNoPtes = 4
MM_POOL_FAILURE_REASONS.MmPagedNoPagesAvailable = 5
MM_POOL_FAILURE_REASONS.MmSessionPagedNoPagesAvailable = 6
MM_POOL_FAILURE_REASONS.MmPagedNoCommit = 7
MM_POOL_FAILURE_REASONS.MmSessionPagedNoCommit = 8
MM_POOL_FAILURE_REASONS.MmNonPagedNoResidentAvailable = 9
MM_POOL_FAILURE_REASONS.MmNonPagedNoCommit = 10
MM_POOL_FAILURE_REASONS.MmMaximumFailureReason = 11
BUS_QUERY_ID_TYPE = v_enum()
BUS_QUERY_ID_TYPE.BusQueryDeviceID = 0
BUS_QUERY_ID_TYPE.BusQueryHardwareIDs = 1
BUS_QUERY_ID_TYPE.BusQueryCompatibleIDs = 2
BUS_QUERY_ID_TYPE.BusQueryInstanceID = 3
BUS_QUERY_ID_TYPE.BusQueryDeviceSerialNumber = 4
BUS_QUERY_ID_TYPE.BusQueryContainerID = 5
PROC_HYPERVISOR_STATE = v_enum()
PROC_HYPERVISOR_STATE.ProcHypervisorNone = 0
PROC_HYPERVISOR_STATE.ProcHypervisorPresent = 1
PROC_HYPERVISOR_STATE.ProcHypervisorPower = 2
MM_PREEMPTIVE_TRIMS = v_enum()
MM_PREEMPTIVE_TRIMS.MmPreemptForNonPaged = 0
MM_PREEMPTIVE_TRIMS.MmPreemptForPaged = 1
MM_PREEMPTIVE_TRIMS.MmPreemptForNonPagedPriority = 2
MM_PREEMPTIVE_TRIMS.MmPreemptForPagedPriority = 3
MM_PREEMPTIVE_TRIMS.MmMaximumPreempt = 4
TRACE_INFORMATION_CLASS = v_enum()
TRACE_INFORMATION_CLASS.TraceIdClass = 0
TRACE_INFORMATION_CLASS.TraceHandleClass = 1
TRACE_INFORMATION_CLASS.TraceEnableFlagsClass = 2
TRACE_INFORMATION_CLASS.TraceEnableLevelClass = 3
TRACE_INFORMATION_CLASS.GlobalLoggerHandleClass = 4
TRACE_INFORMATION_CLASS.EventLoggerHandleClass = 5
TRACE_INFORMATION_CLASS.AllLoggerHandlesClass = 6
TRACE_INFORMATION_CLASS.TraceHandleByNameClass = 7
TRACE_INFORMATION_CLASS.LoggerEventsLostClass = 8
TRACE_INFORMATION_CLASS.TraceSessionSettingsClass = 9
TRACE_INFORMATION_CLASS.LoggerEventsLoggedClass = 10
TRACE_INFORMATION_CLASS.DiskIoNotifyRoutinesClass = 11
TRACE_INFORMATION_CLASS.TraceInformationClassReserved1 = 12
TRACE_INFORMATION_CLASS.AllPossibleNotifyRoutinesClass = 12
TRACE_INFORMATION_CLASS.FltIoNotifyRoutinesClass = 13
TRACE_INFORMATION_CLASS.TraceInformationClassReserved2 = 14
TRACE_INFORMATION_CLASS.HypervisorStackwalkRoutineClass = 14
TRACE_INFORMATION_CLASS.WdfNotifyRoutinesClass = 15
TRACE_INFORMATION_CLASS.MaxTraceInformationClass = 16
WHEA_ERROR_SEVERITY = v_enum()
WHEA_ERROR_SEVERITY.WheaErrSevRecoverable = 0
WHEA_ERROR_SEVERITY.WheaErrSevFatal = 1
WHEA_ERROR_SEVERITY.WheaErrSevCorrected = 2
WHEA_ERROR_SEVERITY.WheaErrSevInformational = 3
VI_DEADLOCK_RESOURCE_TYPE = v_enum()
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockUnknown = 0
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockMutex = 1
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockMutexAbandoned = 2
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockFastMutex = 3
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockFastMutexUnsafe = 4
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockSpinLock = 5
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockInStackQueuedSpinLock = 6
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockUnusedSpinLock = 7
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockEresource = 8
VI_DEADLOCK_RESOURCE_TYPE.VfDeadlockTypeMaximum = 9
KWAIT_STATE = v_enum()
KWAIT_STATE.WaitInProgress = 0
KWAIT_STATE.WaitCommitted = 1
KWAIT_STATE.WaitAborted = 2
KWAIT_STATE.MaximumWaitState = 3
OBJECT_INFORMATION_CLASS = v_enum()
OBJECT_INFORMATION_CLASS.ObjectBasicInformation = 0
OBJECT_INFORMATION_CLASS.ObjectNameInformation = 1
OBJECT_INFORMATION_CLASS.ObjectTypeInformation = 2
OBJECT_INFORMATION_CLASS.ObjectTypesInformation = 3
OBJECT_INFORMATION_CLASS.ObjectHandleFlagInformation = 4
OBJECT_INFORMATION_CLASS.ObjectSessionInformation = 5
OBJECT_INFORMATION_CLASS.MaxObjectInfoClass = 6
ARBITER_ACTION = v_enum()
ARBITER_ACTION.ArbiterActionTestAllocation = 0
ARBITER_ACTION.ArbiterActionRetestAllocation = 1
ARBITER_ACTION.ArbiterActionCommitAllocation = 2
ARBITER_ACTION.ArbiterActionRollbackAllocation = 3
ARBITER_ACTION.ArbiterActionQueryAllocatedResources = 4
ARBITER_ACTION.ArbiterActionWriteReservedResources = 5
ARBITER_ACTION.ArbiterActionQueryConflict = 6
ARBITER_ACTION.ArbiterActionQueryArbitrate = 7
ARBITER_ACTION.ArbiterActionAddReserved = 8
ARBITER_ACTION.ArbiterActionBootAllocation = 9
PROCESS_VA_TYPE = v_enum()
PROCESS_VA_TYPE.ProcessVAImage = 0
PROCESS_VA_TYPE.ProcessVASection = 1
PROCESS_VA_TYPE.ProcessVAPrivate = 2
PROCESS_VA_TYPE.ProcessVAMax = 3
HEAP_FAILURE_TYPE = v_enum()
HEAP_FAILURE_TYPE.heap_failure_internal = 0
HEAP_FAILURE_TYPE.heap_failure_unknown = 1
HEAP_FAILURE_TYPE.heap_failure_generic = 2
HEAP_FAILURE_TYPE.heap_failure_entry_corruption = 3
HEAP_FAILURE_TYPE.heap_failure_multiple_entries_corruption = 4
HEAP_FAILURE_TYPE.heap_failure_virtual_block_corruption = 5
HEAP_FAILURE_TYPE.heap_failure_buffer_overrun = 6
HEAP_FAILURE_TYPE.heap_failure_buffer_underrun = 7
HEAP_FAILURE_TYPE.heap_failure_block_not_busy = 8
HEAP_FAILURE_TYPE.heap_failure_invalid_argument = 9
HEAP_FAILURE_TYPE.heap_failure_usage_after_free = 10
HEAP_FAILURE_TYPE.heap_failure_cross_heap_operation = 11
HEAP_FAILURE_TYPE.heap_failure_freelists_corruption = 12
HEAP_FAILURE_TYPE.heap_failure_listentry_corruption = 13
HEAP_FAILURE_TYPE.heap_failure_lfh_bitmap_mismatch = 14
MM_POOL_TYPES = v_enum()
MM_POOL_TYPES.MmNonPagedPool = 0
MM_POOL_TYPES.MmPagedPool = 1
MM_POOL_TYPES.MmSessionPagedPool = 2
MM_POOL_TYPES.MmMaximumPoolType = 3
POP_DEVICE_IDLE_TYPE = v_enum()
POP_DEVICE_IDLE_TYPE.DeviceIdleNormal = 0
POP_DEVICE_IDLE_TYPE.DeviceIdleDisk = 1
PS_WAKE_REASON = v_enum()
PS_WAKE_REASON.PsWakeReasonUser = 0
PS_WAKE_REASON.PsWakeReasonExecutionRequired = 1
PS_WAKE_REASON.PsWakeReasonKernel = 2
PS_WAKE_REASON.PsWakeReasonInstrumentation = 3
PS_WAKE_REASON.PsMaxWakeReasons = 4
WORK_QUEUE_TYPE = v_enum()
WORK_QUEUE_TYPE.CriticalWorkQueue = 0
WORK_QUEUE_TYPE.DelayedWorkQueue = 1
WORK_QUEUE_TYPE.HyperCriticalWorkQueue = 2
WORK_QUEUE_TYPE.NormalWorkQueue = 3
WORK_QUEUE_TYPE.BackgroundWorkQueue = 4
WORK_QUEUE_TYPE.RealTimeWorkQueue = 5
WORK_QUEUE_TYPE.SuperCriticalWorkQueue = 6
WORK_QUEUE_TYPE.MaximumWorkQueue = 7
WORK_QUEUE_TYPE.CustomPriorityWorkQueue = 32
KTRANSACTION_STATE = v_enum()
KTRANSACTION_STATE.KTransactionUninitialized = 0
KTRANSACTION_STATE.KTransactionActive = 1
KTRANSACTION_STATE.KTransactionPreparing = 2
KTRANSACTION_STATE.KTransactionPrepared = 3
KTRANSACTION_STATE.KTransactionInDoubt = 4
KTRANSACTION_STATE.KTransactionCommitted = 5
KTRANSACTION_STATE.KTransactionAborted = 6
KTRANSACTION_STATE.KTransactionDelegated = 7
KTRANSACTION_STATE.KTransactionPrePreparing = 8
KTRANSACTION_STATE.KTransactionForgotten = 9
KTRANSACTION_STATE.KTransactionRecovering = 10
KTRANSACTION_STATE.KTransactionPrePrepared = 11
EXCEPTION_DISPOSITION = v_enum()
EXCEPTION_DISPOSITION.ExceptionContinueExecution = 0
EXCEPTION_DISPOSITION.ExceptionContinueSearch = 1
EXCEPTION_DISPOSITION.ExceptionNestedException = 2
EXCEPTION_DISPOSITION.ExceptionCollidedUnwind = 3
SECURITY_OPERATION_CODE = v_enum()
SECURITY_OPERATION_CODE.SetSecurityDescriptor = 0
SECURITY_OPERATION_CODE.QuerySecurityDescriptor = 1
SECURITY_OPERATION_CODE.DeleteSecurityDescriptor = 2
SECURITY_OPERATION_CODE.AssignSecurityDescriptor = 3
IRPLOCK = v_enum()
IRPLOCK.IRPLOCK_CANCELABLE = 0
IRPLOCK.IRPLOCK_CANCEL_STARTED = 1
IRPLOCK.IRPLOCK_CANCEL_COMPLETE = 2
IRPLOCK.IRPLOCK_COMPLETED = 3
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE = v_enum()
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeCreate = 0
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeRetired = 1
_unnamed_36553 = v_enum()
_unnamed_36553.KTMOH_CommitTransaction_Result = 1
_unnamed_36553.KTMOH_RollbackTransaction_Result = 2
DEVICE_USAGE_NOTIFICATION_TYPE = v_enum()
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeUndefined = 0
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePaging = 1
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeHibernation = 2
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeDumpFile = 3
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeBoot = 4
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePostDisplay = 5
ETW_HEADER_TYPE = v_enum()
ETW_HEADER_TYPE.EtwHeaderTypeNative = 0
ETW_HEADER_TYPE.EtwHeaderTypeMax = 1
INTERFACE_TYPE = v_enum()
INTERFACE_TYPE.InterfaceTypeUndefined = -1
INTERFACE_TYPE.Internal = 0
INTERFACE_TYPE.Isa = 1
INTERFACE_TYPE.Eisa = 2
INTERFACE_TYPE.MicroChannel = 3
INTERFACE_TYPE.TurboChannel = 4
INTERFACE_TYPE.PCIBus = 5
INTERFACE_TYPE.VMEBus = 6
INTERFACE_TYPE.NuBus = 7
INTERFACE_TYPE.PCMCIABus = 8
INTERFACE_TYPE.CBus = 9
INTERFACE_TYPE.MPIBus = 10
INTERFACE_TYPE.MPSABus = 11
INTERFACE_TYPE.ProcessorInternal = 12
INTERFACE_TYPE.InternalPowerBus = 13
INTERFACE_TYPE.PNPISABus = 14
INTERFACE_TYPE.PNPBus = 15
INTERFACE_TYPE.Vmcs = 16
INTERFACE_TYPE.ACPIBus = 17
INTERFACE_TYPE.MaximumInterfaceType = 18
KWAIT_REASON = v_enum()
KWAIT_REASON.Executive = 0
KWAIT_REASON.FreePage = 1
KWAIT_REASON.PageIn = 2
KWAIT_REASON.PoolAllocation = 3
KWAIT_REASON.DelayExecution = 4
KWAIT_REASON.Suspended = 5
KWAIT_REASON.UserRequest = 6
KWAIT_REASON.WrExecutive = 7
KWAIT_REASON.WrFreePage = 8
KWAIT_REASON.WrPageIn = 9
KWAIT_REASON.WrPoolAllocation = 10
KWAIT_REASON.WrDelayExecution = 11
KWAIT_REASON.WrSuspended = 12
KWAIT_REASON.WrUserRequest = 13
KWAIT_REASON.WrEventPair = 14
KWAIT_REASON.WrQueue = 15
KWAIT_REASON.WrLpcReceive = 16
KWAIT_REASON.WrLpcReply = 17
KWAIT_REASON.WrVirtualMemory = 18
KWAIT_REASON.WrPageOut = 19
KWAIT_REASON.WrRendezvous = 20
KWAIT_REASON.WrKeyedEvent = 21
KWAIT_REASON.WrTerminated = 22
KWAIT_REASON.WrProcessInSwap = 23
KWAIT_REASON.WrCpuRateControl = 24
KWAIT_REASON.WrCalloutStack = 25
KWAIT_REASON.WrKernel = 26
KWAIT_REASON.WrResource = 27
KWAIT_REASON.WrPushLock = 28
KWAIT_REASON.WrMutex = 29
KWAIT_REASON.WrQuantumEnd = 30
KWAIT_REASON.WrDispatchInt = 31
KWAIT_REASON.WrPreempted = 32
KWAIT_REASON.WrYieldExecution = 33
KWAIT_REASON.WrFastMutex = 34
KWAIT_REASON.WrGuardedMutex = 35
KWAIT_REASON.WrRundown = 36
KWAIT_REASON.WrAlertByThreadId = 37
KWAIT_REASON.WrDeferredPreempt = 38
KWAIT_REASON.MaximumWaitReason = 39
PS_RESOURCE_TYPE = v_enum()
PS_RESOURCE_TYPE.PsResourceNonPagedPool = 0
PS_RESOURCE_TYPE.PsResourcePagedPool = 1
PS_RESOURCE_TYPE.PsResourcePageFile = 2
PS_RESOURCE_TYPE.PsResourceWorkingSet = 3
PS_RESOURCE_TYPE.PsResourceCpuRate = 4
PS_RESOURCE_TYPE.PsResourceMax = 5
MM_PAGE_ACCESS_TYPE = v_enum()
MM_PAGE_ACCESS_TYPE.MmPteAccessType = 0
MM_PAGE_ACCESS_TYPE.MmCcReadAheadType = 1
MM_PAGE_ACCESS_TYPE.MmPfnRepurposeType = 2
MM_PAGE_ACCESS_TYPE.MmMaximumPageAccessType = 3
ReplacesCorHdrNumericDefines = v_enum()
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_ILONLY = 1
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_32BITREQUIRED = 2
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_IL_LIBRARY = 4
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_STRONGNAMESIGNED = 8
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_NATIVE_ENTRYPOINT = 16
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_TRACKDEBUGDATA = 65536
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR_V2 = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MINOR = 5
ReplacesCorHdrNumericDefines.COR_DELETED_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.COR_VTABLEGAP_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.NATIVE_TYPE_MAX_CB = 1
ReplacesCorHdrNumericDefines.COR_ILMETHOD_SECT_SMALL_MAX_DATASIZE = 255
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_METHODRVA = 1
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_EHRVA = 2
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_BASICBLOCK = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_32BIT = 1
ReplacesCorHdrNumericDefines.COR_VTABLE_64BIT = 2
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED = 4
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED_RETAIN_APPDOMAIN = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_CALL_MOST_DERIVED = 16
ReplacesCorHdrNumericDefines.IMAGE_COR_EATJ_THUNK_SIZE = 32
ReplacesCorHdrNumericDefines.MAX_CLASS_NAME = 1024
ReplacesCorHdrNumericDefines.MAX_PACKAGE_NAME = 1024
HSTORAGE_TYPE = v_enum()
HSTORAGE_TYPE.Stable = 0
HSTORAGE_TYPE.Volatile = 1
HSTORAGE_TYPE.InvalidStorage = 2
MI_PFN_CACHE_ATTRIBUTE = v_enum()
MI_PFN_CACHE_ATTRIBUTE.MiNonCached = 0
MI_PFN_CACHE_ATTRIBUTE.MiCached = 1
MI_PFN_CACHE_ATTRIBUTE.MiWriteCombined = 2
MI_PFN_CACHE_ATTRIBUTE.MiNotMapped = 3
CREATE_FILE_TYPE = v_enum()
CREATE_FILE_TYPE.CreateFileTypeNone = 0
CREATE_FILE_TYPE.CreateFileTypeNamedPipe = 1
CREATE_FILE_TYPE.CreateFileTypeMailslot = 2
POLICY_AUDIT_EVENT_TYPE = v_enum()
POLICY_AUDIT_EVENT_TYPE.AuditCategorySystem = 0
POLICY_AUDIT_EVENT_TYPE.AuditCategoryLogon = 1
POLICY_AUDIT_EVENT_TYPE.AuditCategoryObjectAccess = 2
POLICY_AUDIT_EVENT_TYPE.AuditCategoryPrivilegeUse = 3
POLICY_AUDIT_EVENT_TYPE.AuditCategoryDetailedTracking = 4
POLICY_AUDIT_EVENT_TYPE.AuditCategoryPolicyChange = 5
POLICY_AUDIT_EVENT_TYPE.AuditCategoryAccountManagement = 6
POLICY_AUDIT_EVENT_TYPE.AuditCategoryDirectoryServiceAccess = 7
POLICY_AUDIT_EVENT_TYPE.AuditCategoryAccountLogon = 8
ETW_RT_EVENT_LOSS = v_enum()
ETW_RT_EVENT_LOSS.EtwRtEventNoLoss = 0
ETW_RT_EVENT_LOSS.EtwRtEventLost = 1
ETW_RT_EVENT_LOSS.EtwRtBufferLost = 2
ETW_RT_EVENT_LOSS.EtwRtBackupLost = 3
ETW_RT_EVENT_LOSS.EtwRtEventLossMax = 4
DEVICE_WAKE_DEPTH = v_enum()
DEVICE_WAKE_DEPTH.DeviceWakeDepthNotWakeable = 0
DEVICE_WAKE_DEPTH.DeviceWakeDepthD0 = 1
DEVICE_WAKE_DEPTH.DeviceWakeDepthD1 = 2
DEVICE_WAKE_DEPTH.DeviceWakeDepthD2 = 3
DEVICE_WAKE_DEPTH.DeviceWakeDepthD3hot = 4
DEVICE_WAKE_DEPTH.DeviceWakeDepthD3cold = 5
DEVICE_WAKE_DEPTH.DeviceWakeDepthMaximum = 6
POP_IO_STATUS = v_enum()
POP_IO_STATUS.IoReady = 0
POP_IO_STATUS.IoPending = 1
POP_IO_STATUS.IoDone = 2
WOW64_SHARED_INFORMATION = v_enum()
WOW64_SHARED_INFORMATION.SharedNtdll32LdrInitializeThunk = 0
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserExceptionDispatcher = 1
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserApcDispatcher = 2
WOW64_SHARED_INFORMATION.SharedNtdll32KiUserCallbackDispatcher = 3
WOW64_SHARED_INFORMATION.SharedNtdll32LdrHotPatchRoutine = 4
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListFault = 5
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListResume = 6
WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListEnd = 7
WOW64_SHARED_INFORMATION.SharedNtdll32RtlUserThreadStart = 8
WOW64_SHARED_INFORMATION.SharedNtdll32pQueryProcessDebugInformationRemote = 9
WOW64_SHARED_INFORMATION.SharedNtdll32EtwpNotificationThread = 10
WOW64_SHARED_INFORMATION.SharedNtdll32BaseAddress = 11
WOW64_SHARED_INFORMATION.SharedNtdll32RtlpWnfNotificationThread = 12
WOW64_SHARED_INFORMATION.SharedNtdll32LdrSystemDllInitBlock = 13
WOW64_SHARED_INFORMATION.Wow64SharedPageEntriesCount = 14
PNP_DEVICE_ACTION_REQUEST = v_enum()
PNP_DEVICE_ACTION_REQUEST.AssignResources = 0
PNP_DEVICE_ACTION_REQUEST.ClearDeviceProblem = 1
PNP_DEVICE_ACTION_REQUEST.ClearProblem = 2
PNP_DEVICE_ACTION_REQUEST.ClearEjectProblem = 3
PNP_DEVICE_ACTION_REQUEST.HaltDevice = 4
PNP_DEVICE_ACTION_REQUEST.QueryPowerRelations = 5
PNP_DEVICE_ACTION_REQUEST.Rebalance = 6
PNP_DEVICE_ACTION_REQUEST.ReenumerateBootDevices = 7
PNP_DEVICE_ACTION_REQUEST.ReenumerateDeviceOnly = 8
PNP_DEVICE_ACTION_REQUEST.ReenumerateDeviceTree = 9
PNP_DEVICE_ACTION_REQUEST.ReenumerateRootDevices = 10
PNP_DEVICE_ACTION_REQUEST.RequeryDeviceState = 11
PNP_DEVICE_ACTION_REQUEST.ResetDevice = 12
PNP_DEVICE_ACTION_REQUEST.ResourceRequirementsChanged = 13
PNP_DEVICE_ACTION_REQUEST.RestartEnumeration = 14
PNP_DEVICE_ACTION_REQUEST.SetDeviceProblem = 15
PNP_DEVICE_ACTION_REQUEST.StartDevice = 16
PNP_DEVICE_ACTION_REQUEST.StartSystemDevicesPass0 = 17
PNP_DEVICE_ACTION_REQUEST.StartSystemDevicesPass1 = 18
PNP_DEVICE_ACTION_REQUEST.NotifyTransportRelationsChange = 19
PNP_DEVICE_ACTION_REQUEST.NotifyEjectionRelationsChange = 20
PNP_DEVICE_ACTION_REQUEST.ConfigureDevice = 21
PNP_DEVICE_ACTION_REQUEST.ConfigureDeviceClass = 22
DEVICE_RELATION_TYPE = v_enum()
DEVICE_RELATION_TYPE.BusRelations = 0
DEVICE_RELATION_TYPE.EjectionRelations = 1
DEVICE_RELATION_TYPE.PowerRelations = 2
DEVICE_RELATION_TYPE.RemovalRelations = 3
DEVICE_RELATION_TYPE.TargetDeviceRelation = 4
DEVICE_RELATION_TYPE.SingleBusRelations = 5
DEVICE_RELATION_TYPE.TransportRelations = 6
FILE_INFORMATION_CLASS = v_enum()
FILE_INFORMATION_CLASS.FileDirectoryInformation = 1
FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 2
FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 3
FILE_INFORMATION_CLASS.FileBasicInformation = 4
FILE_INFORMATION_CLASS.FileStandardInformation = 5
FILE_INFORMATION_CLASS.FileInternalInformation = 6
FILE_INFORMATION_CLASS.FileEaInformation = 7
FILE_INFORMATION_CLASS.FileAccessInformation = 8
FILE_INFORMATION_CLASS.FileNameInformation = 9
FILE_INFORMATION_CLASS.FileRenameInformation = 10
FILE_INFORMATION_CLASS.FileLinkInformation = 11
FILE_INFORMATION_CLASS.FileNamesInformation = 12
FILE_INFORMATION_CLASS.FileDispositionInformation = 13
FILE_INFORMATION_CLASS.FilePositionInformation = 14
FILE_INFORMATION_CLASS.FileFullEaInformation = 15
FILE_INFORMATION_CLASS.FileModeInformation = 16
FILE_INFORMATION_CLASS.FileAlignmentInformation = 17
FILE_INFORMATION_CLASS.FileAllInformation = 18
FILE_INFORMATION_CLASS.FileAllocationInformation = 19
FILE_INFORMATION_CLASS.FileEndOfFileInformation = 20
FILE_INFORMATION_CLASS.FileAlternateNameInformation = 21
FILE_INFORMATION_CLASS.FileStreamInformation = 22
FILE_INFORMATION_CLASS.FilePipeInformation = 23
FILE_INFORMATION_CLASS.FilePipeLocalInformation = 24
FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 25
FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 26
FILE_INFORMATION_CLASS.FileMailslotSetInformation = 27
FILE_INFORMATION_CLASS.FileCompressionInformation = 28
FILE_INFORMATION_CLASS.FileObjectIdInformation = 29
FILE_INFORMATION_CLASS.FileCompletionInformation = 30
FILE_INFORMATION_CLASS.FileMoveClusterInformation = 31
FILE_INFORMATION_CLASS.FileQuotaInformation = 32
FILE_INFORMATION_CLASS.FileReparsePointInformation = 33
FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 34
FILE_INFORMATION_CLASS.FileAttributeTagInformation = 35
FILE_INFORMATION_CLASS.FileTrackingInformation = 36
FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 37
FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 38
FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 39
FILE_INFORMATION_CLASS.FileShortNameInformation = 40
FILE_INFORMATION_CLASS.FileIoCompletionNotificationInformation = 41
FILE_INFORMATION_CLASS.FileIoStatusBlockRangeInformation = 42
FILE_INFORMATION_CLASS.FileIoPriorityHintInformation = 43
FILE_INFORMATION_CLASS.FileSfioReserveInformation = 44
FILE_INFORMATION_CLASS.FileSfioVolumeInformation = 45
FILE_INFORMATION_CLASS.FileHardLinkInformation = 46
FILE_INFORMATION_CLASS.FileProcessIdsUsingFileInformation = 47
FILE_INFORMATION_CLASS.FileNormalizedNameInformation = 48
FILE_INFORMATION_CLASS.FileNetworkPhysicalNameInformation = 49
FILE_INFORMATION_CLASS.FileIdGlobalTxDirectoryInformation = 50
FILE_INFORMATION_CLASS.FileIsRemoteDeviceInformation = 51
FILE_INFORMATION_CLASS.FileAttributeCacheInformation = 52
FILE_INFORMATION_CLASS.FileNumaNodeInformation = 53
FILE_INFORMATION_CLASS.FileStandardLinkInformation = 54
FILE_INFORMATION_CLASS.FileRemoteProtocolInformation = 55
FILE_INFORMATION_CLASS.FileRenameInformationBypassAccessCheck = 56
FILE_INFORMATION_CLASS.FileLinkInformationBypassAccessCheck = 57
FILE_INFORMATION_CLASS.FileVolumeNameInformation = 58
FILE_INFORMATION_CLASS.FileIdInformation = 59
FILE_INFORMATION_CLASS.FileIdExtdDirectoryInformation = 60
FILE_INFORMATION_CLASS.FileMaximumInformation = 61
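# Illustrative helper (a sketch, not part of the generated definitions): the
# v_enum() namespaces above are plain attribute holders, so a value-to-name map
# can be built from vars() to print numeric fields symbolically.  The helper
# name _example_enum_name is hypothetical; aliased values resolve to whichever
# name happens to be seen last.
def _example_enum_name(enum_ns, value):
    names = dict((v, k) for k, v in vars(enum_ns).items())
    return names.get(value)
# e.g. _example_enum_name(FILE_INFORMATION_CLASS, 5) == 'FileStandardInformation'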
DEVICE_POWER_STATE = v_enum()
DEVICE_POWER_STATE.PowerDeviceUnspecified = 0
DEVICE_POWER_STATE.PowerDeviceD0 = 1
DEVICE_POWER_STATE.PowerDeviceD1 = 2
DEVICE_POWER_STATE.PowerDeviceD2 = 3
DEVICE_POWER_STATE.PowerDeviceD3 = 4
DEVICE_POWER_STATE.PowerDeviceMaximum = 5
MEMORY_CACHING_TYPE = v_enum()
MEMORY_CACHING_TYPE.MmNonCached = 0
MEMORY_CACHING_TYPE.MmCached = 1
MEMORY_CACHING_TYPE.MmWriteCombined = 2
MEMORY_CACHING_TYPE.MmHardwareCoherentCached = 3
MEMORY_CACHING_TYPE.MmNonCachedUnordered = 4
MEMORY_CACHING_TYPE.MmUSWCCached = 5
MEMORY_CACHING_TYPE.MmMaximumCacheType = 6
NT_PRODUCT_TYPE = v_enum()
NT_PRODUCT_TYPE.NtProductWinNt = 1
NT_PRODUCT_TYPE.NtProductLanManNt = 2
NT_PRODUCT_TYPE.NtProductServer = 3
IOP_PRIORITY_HINT = v_enum()
IOP_PRIORITY_HINT.IopIoPriorityNotSet = 0
IOP_PRIORITY_HINT.IopIoPriorityVeryLow = 1
IOP_PRIORITY_HINT.IopIoPriorityLow = 2
IOP_PRIORITY_HINT.IopIoPriorityNormal = 3
IOP_PRIORITY_HINT.IopIoPriorityHigh = 4
IOP_PRIORITY_HINT.IopIoPriorityCritical = 5
IOP_PRIORITY_HINT.MaxIopIoPriorityTypes = 6
WHEA_ERROR_SOURCE_TYPE = v_enum()
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMCE = 0
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCMC = 1
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCPE = 2
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeNMI = 3
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypePCIe = 4
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeGeneric = 5
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeINIT = 6
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeBOOT = 7
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeSCIGeneric = 8
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFMCA = 9
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCMC = 10
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCPE = 11
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMax = 12
RTL_GENERIC_COMPARE_RESULTS = v_enum()
RTL_GENERIC_COMPARE_RESULTS.GenericLessThan = 0
RTL_GENERIC_COMPARE_RESULTS.GenericGreaterThan = 1
RTL_GENERIC_COMPARE_RESULTS.GenericEqual = 2
TP_CALLBACK_PRIORITY = v_enum()
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_HIGH = 0
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_NORMAL = 1
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_LOW = 2
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_INVALID = 3
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_COUNT = 3
FSINFOCLASS = v_enum()
FSINFOCLASS.FileFsVolumeInformation = 1
FSINFOCLASS.FileFsLabelInformation = 2
FSINFOCLASS.FileFsSizeInformation = 3
FSINFOCLASS.FileFsDeviceInformation = 4
FSINFOCLASS.FileFsAttributeInformation = 5
FSINFOCLASS.FileFsControlInformation = 6
FSINFOCLASS.FileFsFullSizeInformation = 7
FSINFOCLASS.FileFsObjectIdInformation = 8
FSINFOCLASS.FileFsDriverPathInformation = 9
FSINFOCLASS.FileFsVolumeFlagsInformation = 10
FSINFOCLASS.FileFsSectorSizeInformation = 11
FSINFOCLASS.FileFsDataCopyInformation = 12
FSINFOCLASS.FileFsMaximumInformation = 13
WORKING_SET_TYPE = v_enum()
WORKING_SET_TYPE.WorkingSetTypeUser = 0
WORKING_SET_TYPE.WorkingSetTypeSession = 1
WORKING_SET_TYPE.WorkingSetTypeSystemTypes = 2
WORKING_SET_TYPE.WorkingSetTypeSystemCache = 2
WORKING_SET_TYPE.WorkingSetTypePagedPool = 3
WORKING_SET_TYPE.WorkingSetTypeSystemPtes = 4
WORKING_SET_TYPE.WorkingSetTypeMaximum = 5
POOL_TYPE = v_enum()
POOL_TYPE.NonPagedPool = 0
POOL_TYPE.NonPagedPoolExecute = 0
POOL_TYPE.PagedPool = 1
POOL_TYPE.NonPagedPoolMustSucceed = 2
POOL_TYPE.DontUseThisType = 3
POOL_TYPE.NonPagedPoolCacheAligned = 4
POOL_TYPE.PagedPoolCacheAligned = 5
POOL_TYPE.NonPagedPoolCacheAlignedMustS = 6
POOL_TYPE.MaxPoolType = 7
POOL_TYPE.NonPagedPoolBase = 0
POOL_TYPE.NonPagedPoolBaseMustSucceed = 2
POOL_TYPE.NonPagedPoolBaseCacheAligned = 4
POOL_TYPE.NonPagedPoolBaseCacheAlignedMustS = 6
POOL_TYPE.NonPagedPoolSession = 32
POOL_TYPE.PagedPoolSession = 33
POOL_TYPE.NonPagedPoolMustSucceedSession = 34
POOL_TYPE.DontUseThisTypeSession = 35
POOL_TYPE.NonPagedPoolCacheAlignedSession = 36
POOL_TYPE.PagedPoolCacheAlignedSession = 37
POOL_TYPE.NonPagedPoolCacheAlignedMustSSession = 38
POOL_TYPE.NonPagedPoolNx = 512
POOL_TYPE.NonPagedPoolNxCacheAligned = 516
POOL_TYPE.NonPagedPoolSessionNx = 544
MODE = v_enum()
MODE.KernelMode = 0
MODE.UserMode = 1
MODE.MaximumMode = 2
FS_FILTER_SECTION_SYNC_TYPE = v_enum()
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeOther = 0
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeCreateSection = 1
PERFINFO_KERNELMEMORY_USAGE_TYPE = v_enum()
PERFINFO_KERNELMEMORY_USAGE_TYPE.PerfInfoMemUsagePfnMetadata = 0
PERFINFO_KERNELMEMORY_USAGE_TYPE.PerfInfoMemUsageMax = 1
FILE_OBJECT_EXTENSION_TYPE = v_enum()
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeTransactionParams = 0
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeInternal = 1
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeIosbRange = 2
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeGeneric = 3
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeSfio = 4
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeSymlink = 5
FILE_OBJECT_EXTENSION_TYPE.FoExtTypeOplockKey = 6
FILE_OBJECT_EXTENSION_TYPE.MaxFoExtTypes = 7
IRQ_PRIORITY = v_enum()
IRQ_PRIORITY.IrqPriorityUndefined = 0
IRQ_PRIORITY.IrqPriorityLow = 1
IRQ_PRIORITY.IrqPriorityNormal = 2
IRQ_PRIORITY.IrqPriorityHigh = 3
KPROFILE_SOURCE = v_enum()
KPROFILE_SOURCE.ProfileTime = 0
KPROFILE_SOURCE.ProfileAlignmentFixup = 1
KPROFILE_SOURCE.ProfileTotalIssues = 2
KPROFILE_SOURCE.ProfilePipelineDry = 3
KPROFILE_SOURCE.ProfileLoadInstructions = 4
KPROFILE_SOURCE.ProfilePipelineFrozen = 5
KPROFILE_SOURCE.ProfileBranchInstructions = 6
KPROFILE_SOURCE.ProfileTotalNonissues = 7
KPROFILE_SOURCE.ProfileDcacheMisses = 8
KPROFILE_SOURCE.ProfileIcacheMisses = 9
KPROFILE_SOURCE.ProfileCacheMisses = 10
KPROFILE_SOURCE.ProfileBranchMispredictions = 11
KPROFILE_SOURCE.ProfileStoreInstructions = 12
KPROFILE_SOURCE.ProfileFpInstructions = 13
KPROFILE_SOURCE.ProfileIntegerInstructions = 14
KPROFILE_SOURCE.Profile2Issue = 15
KPROFILE_SOURCE.Profile3Issue = 16
KPROFILE_SOURCE.Profile4Issue = 17
KPROFILE_SOURCE.ProfileSpecialInstructions = 18
KPROFILE_SOURCE.ProfileTotalCycles = 19
KPROFILE_SOURCE.ProfileIcacheIssues = 20
KPROFILE_SOURCE.ProfileDcacheAccesses = 21
KPROFILE_SOURCE.ProfileMemoryBarrierCycles = 22
KPROFILE_SOURCE.ProfileLoadLinkedIssues = 23
KPROFILE_SOURCE.ProfileMaximum = 24
MI_SYSTEM_VA_TYPE = v_enum()
MI_SYSTEM_VA_TYPE.MiVaUnused = 0
MI_SYSTEM_VA_TYPE.MiVaSessionSpace = 1
MI_SYSTEM_VA_TYPE.MiVaProcessSpace = 2
MI_SYSTEM_VA_TYPE.MiVaBootLoaded = 3
MI_SYSTEM_VA_TYPE.MiVaPfnDatabase = 4
MI_SYSTEM_VA_TYPE.MiVaNonPagedPool = 5
MI_SYSTEM_VA_TYPE.MiVaPagedPool = 6
MI_SYSTEM_VA_TYPE.MiVaSpecialPoolPaged = 7
MI_SYSTEM_VA_TYPE.MiVaSystemCache = 8
MI_SYSTEM_VA_TYPE.MiVaSystemPtes = 9
MI_SYSTEM_VA_TYPE.MiVaHal = 10
MI_SYSTEM_VA_TYPE.MiVaSessionGlobalSpace = 11
MI_SYSTEM_VA_TYPE.MiVaDriverImages = 12
MI_SYSTEM_VA_TYPE.MiVaSpecialPoolNonPaged = 13
MI_SYSTEM_VA_TYPE.MiVaPagedProtoPool = 14
MI_SYSTEM_VA_TYPE.MiVaMaximumType = 15
PROCESS_SECTION_TYPE = v_enum()
PROCESS_SECTION_TYPE.ProcessSectionData = 0
PROCESS_SECTION_TYPE.ProcessSectionImage = 1
PROCESS_SECTION_TYPE.ProcessSectionImageNx = 2
PROCESS_SECTION_TYPE.ProcessSectionPagefileBacked = 3
PROCESS_SECTION_TYPE.ProcessSectionMax = 4
LSA_FOREST_TRUST_RECORD_TYPE = v_enum()
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelName = 0
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelNameEx = 1
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustDomainInfo = 2
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustRecordTypeLast = 2
LDR_DDAG_STATE = v_enum()
LDR_DDAG_STATE.LdrModulesMerged = -5
LDR_DDAG_STATE.LdrModulesInitError = -4
LDR_DDAG_STATE.LdrModulesSnapError = -3
LDR_DDAG_STATE.LdrModulesUnloaded = -2
LDR_DDAG_STATE.LdrModulesUnloading = -1
LDR_DDAG_STATE.LdrModulesPlaceHolder = 0
LDR_DDAG_STATE.LdrModulesMapping = 1
LDR_DDAG_STATE.LdrModulesMapped = 2
LDR_DDAG_STATE.LdrModulesWaitingForDependencies = 3
LDR_DDAG_STATE.LdrModulesSnapping = 4
LDR_DDAG_STATE.LdrModulesSnapped = 5
LDR_DDAG_STATE.LdrModulesCondensed = 6
LDR_DDAG_STATE.LdrModulesReadyToInit = 7
LDR_DDAG_STATE.LdrModulesInitializing = 8
LDR_DDAG_STATE.LdrModulesReadyToRun = 9
MI_MEMORY_HIGHLOW = v_enum()
MI_MEMORY_HIGHLOW.MiMemoryHigh = 0
MI_MEMORY_HIGHLOW.MiMemoryLow = 1
MI_MEMORY_HIGHLOW.MiMemoryHighLow = 2
DEVICE_TEXT_TYPE = v_enum()
DEVICE_TEXT_TYPE.DeviceTextDescription = 0
DEVICE_TEXT_TYPE.DeviceTextLocationInformation = 1
MMLISTS = v_enum()
MMLISTS.ZeroedPageList = 0
MMLISTS.FreePageList = 1
MMLISTS.StandbyPageList = 2
MMLISTS.ModifiedPageList = 3
MMLISTS.ModifiedNoWritePageList = 4
MMLISTS.BadPageList = 5
MMLISTS.ActiveAndValid = 6
MMLISTS.TransitionPage = 7
KINTERRUPT_MODE = v_enum()
KINTERRUPT_MODE.LevelSensitive = 0
KINTERRUPT_MODE.Latched = 1
TOKEN_TYPE = v_enum()
TOKEN_TYPE.TokenPrimary = 1
TOKEN_TYPE.TokenImpersonation = 2
HARDWARE_COUNTER_TYPE = v_enum()
HARDWARE_COUNTER_TYPE.PMCCounter = 0
HARDWARE_COUNTER_TYPE.MaxHardwareCounterType = 1
TRANSFER_TYPE = v_enum()
TRANSFER_TYPE.ReadTransfer = 0
TRANSFER_TYPE.WriteTransfer = 1
TRANSFER_TYPE.OtherTransfer = 2
PNP_DEVNODE_STATE = v_enum()
PNP_DEVNODE_STATE.DeviceNodeUnspecified = 768
PNP_DEVNODE_STATE.DeviceNodeUninitialized = 769
PNP_DEVNODE_STATE.DeviceNodeInitialized = 770
PNP_DEVNODE_STATE.DeviceNodeDriversAdded = 771
PNP_DEVNODE_STATE.DeviceNodeResourcesAssigned = 772
PNP_DEVNODE_STATE.DeviceNodeStartPending = 773
PNP_DEVNODE_STATE.DeviceNodeStartCompletion = 774
PNP_DEVNODE_STATE.DeviceNodeStartPostWork = 775
PNP_DEVNODE_STATE.DeviceNodeStarted = 776
PNP_DEVNODE_STATE.DeviceNodeQueryStopped = 777
PNP_DEVNODE_STATE.DeviceNodeStopped = 778
PNP_DEVNODE_STATE.DeviceNodeRestartCompletion = 779
PNP_DEVNODE_STATE.DeviceNodeEnumeratePending = 780
PNP_DEVNODE_STATE.DeviceNodeEnumerateCompletion = 781
PNP_DEVNODE_STATE.DeviceNodeAwaitingQueuedDeletion = 782
PNP_DEVNODE_STATE.DeviceNodeAwaitingQueuedRemoval = 783
PNP_DEVNODE_STATE.DeviceNodeQueryRemoved = 784
PNP_DEVNODE_STATE.DeviceNodeRemovePendingCloses = 785
PNP_DEVNODE_STATE.DeviceNodeRemoved = 786
PNP_DEVNODE_STATE.DeviceNodeDeletePendingCloses = 787
PNP_DEVNODE_STATE.DeviceNodeDeleted = 788
PNP_DEVNODE_STATE.MaxDeviceNodeState = 789
class KEXECUTE_OPTIONS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExecuteDisable = v_uint8()
class IO_PRIORITY_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.ThreadPriority = v_uint32()
self.PagePriority = v_uint32()
self.IoPriority = v_uint32()
class IOV_FORCED_PENDING_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Irp = v_ptr32()
self.Thread = v_ptr32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(62) ])
class SEGMENT_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseAddress = v_ptr32()
self.TotalNumberOfPtes = v_uint32()
self.SizeOfSegment = LARGE_INTEGER()
self.NonExtendedPtes = v_uint32()
self.ImageCommitment = v_uint32()
self.ControlArea = v_ptr32()
self.Subsection = v_ptr32()
self.MmSectionFlags = v_ptr32()
self.MmSubSectionFlags = v_ptr32()
class DUAL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Map = v_ptr32()
self.SmallDir = v_ptr32()
self.Guard = v_uint32()
self.FreeDisplay = vstruct.VArray([ FREE_DISPLAY() for i in xrange(24) ])
self.FreeBins = LIST_ENTRY()
self.FreeSummary = v_uint32()
class SID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint8()
self.SubAuthorityCount = v_uint8()
self.IdentifierAuthority = SID_IDENTIFIER_AUTHORITY()
self.SubAuthority = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class MMPTE_HARDWARE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class POP_CPU_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Eax = v_uint32()
self.Ebx = v_uint32()
self.Ecx = v_uint32()
self.Edx = v_uint32()
class _unnamed_29146(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Failure = v_uint32()
self.Status = v_uint32()
self.Point = v_uint32()
class _unnamed_29147(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Action = v_uint32()
self.Handle = v_ptr32()
self.Status = v_uint32()
class WHEA_ERROR_PACKET_V2(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Version = v_uint32()
self.Length = v_uint32()
self.Flags = WHEA_ERROR_PACKET_FLAGS()
self.ErrorType = v_uint32()
self.ErrorSeverity = v_uint32()
self.ErrorSourceId = v_uint32()
self.ErrorSourceType = v_uint32()
self.NotifyType = GUID()
self.Context = v_uint64()
self.DataFormat = v_uint32()
self.Reserved1 = v_uint32()
self.DataOffset = v_uint32()
self.DataLength = v_uint32()
self.PshedDataOffset = v_uint32()
self.PshedDataLength = v_uint32()
class CC_EXTERNAL_CACHE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Callback = v_ptr32()
self.DirtyPageStatistics = DIRTY_PAGE_STATISTICS()
self.Links = LIST_ENTRY()
class GROUP_AFFINITY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint32()
self.Group = v_uint16()
self.Reserved = vstruct.VArray([ v_uint16() for i in xrange(3) ])
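# Illustrative sketch (assuming the usual vivisect vstruct API): instantiating a
# generated class yields a zeroed structure whose raw bytes can be produced with
# vsEmit() and whose packed size is available via len().  The helper name
# _example_group_affinity_bytes is hypothetical.
def _example_group_affinity_bytes(group, mask):
    ga = GROUP_AFFINITY()
    ga.Group = group          # assignment stores into the v_uint16 field
    ga.Mask = mask            # v_uint32 processor mask
    return ga.vsEmit()        # packed little-endian bytes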
class VI_VERIFIER_ISSUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IssueType = v_uint32()
self.Address = v_ptr32()
self.Parameters = vstruct.VArray([ v_uint32() for i in xrange(2) ])
class POP_IRP_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Irp = v_ptr32()
self.Pdo = v_ptr32()
self.TargetDevice = v_ptr32()
self.CurrentDevice = v_ptr32()
self.WatchdogStart = v_uint64()
self.WatchdogTimer = KTIMER()
self.WatchdogDpc = KDPC()
self.MinorFunction = v_uint8()
self._pad006c = v_bytes(size=3)
self.PowerStateType = v_uint32()
self.PowerState = POWER_STATE()
self.WatchdogEnabled = v_uint8()
self._pad0078 = v_bytes(size=3)
self.FxDevice = v_ptr32()
self.SystemTransition = v_uint8()
self.NotifyPEP = v_uint8()
self._pad0080 = v_bytes(size=2)
self.Device = _unnamed_34060()
class HEAP_STOP_ON_VALUES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocAddress = v_uint32()
self.AllocTag = HEAP_STOP_ON_TAG()
self.ReAllocAddress = v_uint32()
self.ReAllocTag = HEAP_STOP_ON_TAG()
self.FreeAddress = v_uint32()
self.FreeTag = HEAP_STOP_ON_TAG()
class _unnamed_29148(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CheckStack = v_ptr32()
class _unnamed_29149(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cell = v_uint32()
self.CellPoint = v_ptr32()
self.RootPoint = v_ptr32()
self.Index = v_uint32()
class KTSS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Backlink = v_uint16()
self.Reserved0 = v_uint16()
self.Esp0 = v_uint32()
self.Ss0 = v_uint16()
self.Reserved1 = v_uint16()
self.NotUsed1 = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.CR3 = v_uint32()
self.Eip = v_uint32()
self.EFlags = v_uint32()
self.Eax = v_uint32()
self.Ecx = v_uint32()
self.Edx = v_uint32()
self.Ebx = v_uint32()
self.Esp = v_uint32()
self.Ebp = v_uint32()
self.Esi = v_uint32()
self.Edi = v_uint32()
self.Es = v_uint16()
self.Reserved2 = v_uint16()
self.Cs = v_uint16()
self.Reserved3 = v_uint16()
self.Ss = v_uint16()
self.Reserved4 = v_uint16()
self.Ds = v_uint16()
self.Reserved5 = v_uint16()
self.Fs = v_uint16()
self.Reserved6 = v_uint16()
self.Gs = v_uint16()
self.Reserved7 = v_uint16()
self.LDT = v_uint16()
self.Reserved8 = v_uint16()
self.Flags = v_uint16()
self.IoMapBase = v_uint16()
self.IoMaps = vstruct.VArray([ KiIoAccessMap() for i in xrange(1) ])
self.IntDirectionMap = vstruct.VArray([ v_uint8() for i in xrange(32) ])
class CURDIR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DosPath = UNICODE_STRING()
self.Handle = v_ptr32()
class DBGKD_GET_INTERNAL_BREAKPOINT32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakpointAddress = v_uint32()
self.Flags = v_uint32()
self.Calls = v_uint32()
self.MaxCallsPerPeriod = v_uint32()
self.MinInstructions = v_uint32()
self.MaxInstructions = v_uint32()
self.TotalInstructions = v_uint32()
class PO_IRP_MANAGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceIrpQueue = PO_IRP_QUEUE()
self.SystemIrpQueue = PO_IRP_QUEUE()
class DBGKD_MANIPULATE_STATE32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ApiNumber = v_uint32()
self.ProcessorLevel = v_uint16()
self.Processor = v_uint16()
self.ReturnStatus = v_uint32()
self.u = _unnamed_30210()
class ETW_BUFFER_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.QueueHead = v_ptr32()
self.QueueTail = v_ptr32()
self.QueueEntry = SINGLE_LIST_ENTRY()
class SEP_TOKEN_PRIVILEGES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Present = v_uint64()
self.Enabled = v_uint64()
self.EnabledByDefault = v_uint64()
class KALPC_SECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SectionObject = v_ptr32()
self.Size = v_uint32()
self.HandleTable = v_ptr32()
self.SectionHandle = v_ptr32()
self.OwnerProcess = v_ptr32()
self.OwnerPort = v_ptr32()
self.u1 = _unnamed_30828()
self.NumberOfRegions = v_uint32()
self.RegionListHead = LIST_ENTRY()
class _unnamed_30905(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Secure = v_uint32()
class _unnamed_30902(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30905()
class PERFINFO_GROUPMASK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Masks = vstruct.VArray([ v_uint32() for i in xrange(8) ])
class HARDWARE_PTE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class ETW_PERF_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalActiveSessions = v_uint32()
self.TotalBufferMemoryNonPagedPool = v_uint32()
self.TotalBufferMemoryPagedPool = v_uint32()
self.TotalGuidsEnabled = v_uint32()
self.TotalGuidsNotEnabled = v_uint32()
self.TotalGuidsPreEnabled = v_uint32()
class HANDLE_TABLE_ENTRY_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AuditMask = v_uint32()
class DBGKD_WRITE_MEMORY32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TargetBaseAddress = v_uint32()
self.TransferCount = v_uint32()
self.ActualBytesWritten = v_uint32()
class POP_FX_WORK_ORDER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkItem = WORK_QUEUE_ITEM()
self.WorkCount = v_uint32()
class _unnamed_34666(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Group = v_uint16()
self.MessageCount = v_uint16()
self.Vector = v_uint32()
self.Affinity = v_uint32()
class _unnamed_28148(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerSequence = v_ptr32()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SectionOffset = v_uint32()
self.SectionLength = v_uint32()
self.Revision = WHEA_REVISION()
self.ValidBits = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS()
self.Reserved = v_uint8()
self.Flags = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS()
self.SectionType = GUID()
self.FRUId = GUID()
self.SectionSeverity = v_uint32()
self.FRUText = vstruct.VArray([ v_uint8() for i in xrange(20) ])
class EX_WORK_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkerQueue = KQUEUE()
self.WorkItemsProcessed = v_uint32()
self.WorkItemsProcessedLastPass = v_uint32()
self.ThreadCount = v_uint32()
self.TryFailed = v_uint8()
self._pad0038 = v_bytes(size=3)
class MMWSLENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint32()
class PNP_DEVICE_COMPLETION_REQUEST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.DeviceNode = v_ptr32()
self.Context = v_ptr32()
self.CompletionState = v_uint32()
self.IrpPended = v_uint32()
self.Status = v_uint32()
self.Information = v_ptr32()
self.ReferenceCount = v_uint32()
class CHILD_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.List = v_uint32()
class _unnamed_31482(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorVersion = v_uint8()
self.MinorVersion = v_uint8()
self.SubVersion = v_uint8()
self.SubMinorVersion = v_uint8()
class PROC_FEEDBACK_COUNTER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InstantaneousRead = v_ptr32()
self._pad0008 = v_bytes(size=4)
self.LastActualCount = v_uint64()
self.LastReferenceCount = v_uint64()
self.CachedValue = v_uint32()
self._pad0020 = v_bytes(size=4)
self.Affinitized = v_uint8()
self.Differential = v_uint8()
self.DisableInterrupts = v_uint8()
self._pad0024 = v_bytes(size=1)
self.Context = v_uint32()
class CM_PARTIAL_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint16()
self.Revision = v_uint16()
self.Count = v_uint32()
self.PartialDescriptors = vstruct.VArray([ CM_PARTIAL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class _unnamed_29717(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Disk = _unnamed_34794()
class DBGKD_RESTORE_BREAKPOINT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakPointHandle = v_uint32()
class PEPHANDLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.unused = v_uint32()
class IMAGE_SECURITY_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PageHashes = v_ptr32()
class DEVICE_CAPABILITIES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.DeviceD1 = v_uint32()
self.Address = v_uint32()
self.UINumber = v_uint32()
self.DeviceState = vstruct.VArray([ DEVICE_POWER_STATE() for i in xrange(7) ])
self.SystemWake = v_uint32()
self.DeviceWake = v_uint32()
self.D1Latency = v_uint32()
self.D2Latency = v_uint32()
self.D3Latency = v_uint32()
class IOP_FILE_OBJECT_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FoExtFlags = v_uint32()
self.FoExtPerTypeExtension = vstruct.VArray([ v_ptr32() for i in xrange(7) ])
self.FoIoPriorityHint = v_uint32()
class IOP_IRP_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExtensionFlags = v_uint16()
self.TypesAllocated = v_uint16()
self.ActivityId = GUID()
self._pad0018 = v_bytes(size=4)
self.Timestamp = LARGE_INTEGER()
class _unnamed_34061(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotifyDevice = v_ptr32()
self.FxDeviceActivated = v_uint8()
self._pad0008 = v_bytes(size=3)
class _unnamed_34060(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CallerCompletion = v_ptr32()
self.CallerContext = v_ptr32()
self.CallerDevice = v_ptr32()
self.SystemWake = v_uint8()
self._pad0010 = v_bytes(size=3)
class ACL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AclRevision = v_uint8()
self.Sbz1 = v_uint8()
self.AclSize = v_uint16()
self.AceCount = v_uint16()
self.Sbz2 = v_uint16()
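# Illustrative sketch (assuming the usual vivisect vstruct API): a structure can
# be hydrated from raw memory with vsParse() and its fields read back as plain
# attributes.  The helper name _example_parse_acl and the caller-supplied buffer
# are hypothetical.
def _example_parse_acl(buf):
    acl = ACL()
    acl.vsParse(buf)          # parse the fixed-size ACL header from buf
    return acl.AclRevision, acl.AceCount, acl.AclSize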
class PCW_INSTANCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class VOLUME_CACHE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeByteCode = v_uint16()
self.UseCount = v_uint32()
self.DeviceObject = v_ptr32()
self.VolumeCacheMapLinks = LIST_ENTRY()
self.DirtyPages = v_uint32()
self.LogHandleContext = LOG_HANDLE_CONTEXT()
self.Flags = v_uint32()
self.PagesQueuedToDisk = v_uint32()
self.LoggedPagesQueuedToDisk = v_uint32()
self._pad0078 = v_bytes(size=4)
class CALLBACK_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class RTL_RANGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
self.UserData = v_ptr32()
self.Owner = v_ptr32()
self.Attributes = v_uint8()
self.Flags = v_uint8()
self._pad0020 = v_bytes(size=6)
class _unnamed_34324(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SnapSharedExportsFailed = v_uint32()
class HEAP_FREE_ENTRY_EXTRA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TagIndex = v_uint16()
self.FreeBackTraceIndex = v_uint16()
class EXCEPTION_RECORD64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_uint64()
self.ExceptionAddress = v_uint64()
self.NumberParameters = v_uint32()
self.unusedAlignment = v_uint32()
self.ExceptionInformation = vstruct.VArray([ v_uint64() for i in xrange(15) ])
class SEP_LOWBOX_NUMBER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
self.ReferenceCount = v_uint32()
self.PackageSid = v_ptr32()
self.LowboxNumber = v_uint32()
self.AtomTable = v_ptr32()
class KPROCESS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.ProfileListHead = LIST_ENTRY()
self.DirectoryTableBase = v_uint32()
self.LdtDescriptor = KGDTENTRY()
self.Int21Descriptor = KIDTENTRY()
self.ThreadListHead = LIST_ENTRY()
self.ProcessLock = v_uint32()
self.Affinity = KAFFINITY_EX()
self.ReadyListHead = LIST_ENTRY()
self.SwapListEntry = SINGLE_LIST_ENTRY()
self.ActiveProcessors = KAFFINITY_EX()
self.AutoAlignment = v_uint32()
self.BasePriority = v_uint8()
self.QuantumReset = v_uint8()
self.Visited = v_uint8()
self.Flags = KEXECUTE_OPTIONS()
self.ThreadSeed = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.IdealNode = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self.IdealGlobalNode = v_uint16()
self.Spare1 = v_uint16()
self.IopmOffset = v_uint16()
self.SchedulingGroup = v_ptr32()
self.StackCount = KSTACK_COUNT()
self.ProcessListEntry = LIST_ENTRY()
self.CycleTime = v_uint64()
self.ContextSwitches = v_uint64()
self.FreezeCount = v_uint32()
self.KernelTime = v_uint32()
self.UserTime = v_uint32()
self.VdmTrapcHandler = v_ptr32()
class ALPC_COMMUNICATION_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConnectionPort = v_ptr32()
self.ServerCommunicationPort = v_ptr32()
self.ClientCommunicationPort = v_ptr32()
self.CommunicationList = LIST_ENTRY()
self.HandleTable = ALPC_HANDLE_TABLE()
class DEVICE_OBJECT_POWER_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdleCount = v_uint32()
self.BusyCount = v_uint32()
self.BusyReference = v_uint32()
self.TotalBusyCount = v_uint32()
self.ConservationIdleTime = v_uint32()
self.PerformanceIdleTime = v_uint32()
self.DeviceObject = v_ptr32()
self.IdleList = LIST_ENTRY()
self.IdleType = v_uint32()
self.IdleState = v_uint32()
self.CurrentState = v_uint32()
self.Volume = LIST_ENTRY()
self.Specific = _unnamed_29717()
class _unnamed_27983(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_ptr32()
self.Key = v_uint32()
self.ByteOffset = LARGE_INTEGER()
class _unnamed_30991(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Initialized = v_uint32()
class _unnamed_27988(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OutputBufferLength = v_uint32()
self.InputBufferLength = v_uint32()
self.IoControlCode = v_uint32()
self.Type3InputBuffer = v_ptr32()
class HEAP_TAG_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Allocs = v_uint32()
self.Frees = v_uint32()
self.Size = v_uint32()
self.TagIndex = v_uint16()
self.CreatorBackTraceIndex = v_uint16()
self.TagName = vstruct.VArray([ v_uint16() for i in xrange(24) ])
class VI_DEADLOCK_RESOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
self.NodeCount = v_uint32()
self.ResourceAddress = v_ptr32()
self.ThreadOwner = v_ptr32()
self.ResourceList = LIST_ENTRY()
self.HashChainList = LIST_ENTRY()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
self.LastAcquireTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
self.LastReleaseTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
class PROCESSOR_IDLE_PREPARE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Context = v_ptr32()
self._pad0008 = v_bytes(size=4)
self.Constraints = PROCESSOR_IDLE_CONSTRAINTS()
self.DependencyCount = v_uint32()
self.DependencyUsed = v_uint32()
self.DependencyArray = v_ptr32()
self.PlatformIdleStateIndex = v_uint32()
self.ProcessorIdleStateIndex = v_uint32()
self.IdleSelectFailureMask = v_uint32()
class ALPC_COMPLETION_LIST_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_34963()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Primary = v_uint32()
class TP_CALLBACK_ENVIRON_V3(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.Pool = v_ptr32()
self.CleanupGroup = v_ptr32()
self.CleanupGroupCancelCallback = v_ptr32()
self.RaceDll = v_ptr32()
self.ActivationContext = v_ptr32()
self.FinalizationCallback = v_ptr32()
self.u = _unnamed_25485()
self.CallbackPriority = v_uint32()
self.Size = v_uint32()
class WHEAP_INFO_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ErrorSourceCount = v_uint32()
self.ErrorSourceTable = v_ptr32()
self.WorkQueue = v_ptr32()
class SEP_LOWBOX_HANDLES_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
self.ReferenceCount = v_uint32()
self.PackageSid = v_ptr32()
self.HandleCount = v_uint32()
self.Handles = v_ptr32()
class VI_POOL_ENTRY_INUSE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VirtualAddress = v_ptr32()
self.CallingAddress = v_ptr32()
self.NumberOfBytes = v_uint32()
self.Tag = v_uint32()
class MEMORY_ALLOCATION_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.MemoryType = v_uint32()
self.BasePage = v_uint32()
self.PageCount = v_uint32()
class MMPTE_TRANSITION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class WHEA_ERROR_PACKET_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PreviousError = v_uint32()
class ARM_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Continue = v_uint32()
self.CurrentSymbolStart = v_uint32()
self.CurrentSymbolEnd = v_uint32()
class ALPC_PROCESS_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.ViewListHead = LIST_ENTRY()
self.PagedPoolQuotaCache = v_uint32()
class DIAGNOSTIC_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CallerType = v_uint32()
self.Process = v_ptr32()
self.ServiceTag = v_uint32()
self.ReasonSize = v_uint32()
class OBJECT_HANDLE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HandleAttributes = v_uint32()
self.GrantedAccess = v_uint32()
class KSPIN_LOCK_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Lock = v_ptr32()
class _unnamed_34394(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationCode = v_uint32()
self.NotificationData = v_uint32()
class _unnamed_34397(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VetoType = v_uint32()
self.DeviceIdVetoNameBuffer = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0008 = v_bytes(size=2)
class HEAP_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = _unnamed_30634()
class XSTATE_CONFIGURATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnabledFeatures = v_uint64()
self.EnabledVolatileFeatures = v_uint64()
self.Size = v_uint32()
self.OptimizedSave = v_uint32()
self.Features = vstruct.VArray([ XSTATE_FEATURE() for i in xrange(64) ])
class PS_CLIENT_SECURITY_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImpersonationData = v_uint32()
class RTL_AVL_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BalancedRoot = RTL_BALANCED_LINKS()
self.OrderedPointer = v_ptr32()
self.WhichOrderedElement = v_uint32()
self.NumberGenericTableElements = v_uint32()
self.DepthOfTree = v_uint32()
self.RestartKey = v_ptr32()
self.DeleteCount = v_uint32()
self.CompareRoutine = v_ptr32()
self.AllocateRoutine = v_ptr32()
self.FreeRoutine = v_ptr32()
self.TableContext = v_ptr32()
class POP_FX_DEPENDENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Index = v_uint32()
self.ProviderIndex = v_uint32()
class RTL_SPLAY_LINKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Parent = v_ptr32()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
class _unnamed_26252(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bytes = _unnamed_32530()
class PNP_ASSIGN_RESOURCES_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IncludeFailedDevices = v_uint32()
self.DeviceCount = v_uint32()
self.DeviceList = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class AUTHZBASEP_CLAIM_ATTRIBUTES_COLLECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceGroupsCount = v_uint32()
self.pDeviceGroups = v_ptr32()
self.RestrictedDeviceGroupsCount = v_uint32()
self.pRestrictedDeviceGroups = v_ptr32()
self.DeviceGroupsHash = SID_AND_ATTRIBUTES_HASH()
self.RestrictedDeviceGroupsHash = SID_AND_ATTRIBUTES_HASH()
self.pUserSecurityAttributes = v_ptr32()
self.pDeviceSecurityAttributes = v_ptr32()
self.pRestrictedUserSecurityAttributes = v_ptr32()
self.pRestrictedDeviceSecurityAttributes = v_ptr32()
class MAPPED_FILE_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlArea = v_ptr32()
self.TotalNumberOfPtes = v_uint32()
self.SegmentFlags = SEGMENT_FLAGS()
self.NumberOfCommittedPages = v_uint32()
self.SizeOfSegment = v_uint64()
self.ExtendInfo = v_ptr32()
self.SegmentLock = EX_PUSH_LOCK()
class OWNER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OwnerThread = v_uint32()
self.IoPriorityBoosted = v_uint32()
class EX_PUSH_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locked = v_uint32()
class DEVOBJ_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.PowerFlags = v_uint32()
self.Dope = v_ptr32()
self.ExtensionFlags = v_uint32()
self.DeviceNode = v_ptr32()
self.AttachedTo = v_ptr32()
self.StartIoCount = v_uint32()
self.StartIoKey = v_uint32()
self.StartIoFlags = v_uint32()
self.Vpb = v_ptr32()
self.DependentList = LIST_ENTRY()
self.ProviderList = LIST_ENTRY()
class _unnamed_28186(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatedResources = v_ptr32()
self.AllocatedResourcesTranslated = v_ptr32()
class KSTACK_CONTROL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StackBase = v_uint32()
self.ActualLimit = v_uint32()
self.PreviousTrapFrame = v_ptr32()
self.PreviousExceptionList = v_ptr32()
self.Previous = KERNEL_STACK_SEGMENT()
class ARBITER_ALLOCATION_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
self.CurrentMinimum = v_uint64()
self.CurrentMaximum = v_uint64()
self.Entry = v_ptr32()
self.CurrentAlternative = v_ptr32()
self.AlternativeCount = v_uint32()
self.Alternatives = v_ptr32()
self.Flags = v_uint16()
self.RangeAttributes = v_uint8()
self.RangeAvailableAttributes = v_uint8()
self.WorkSpace = v_uint32()
class BLOB_TYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ResourceId = v_uint32()
self.PoolTag = v_uint32()
self.LookasideIndex = v_uint32()
self.Flags = v_uint32()
self.Counters = v_ptr32()
self.DeleteProcedure = v_ptr32()
self.DestroyProcedure = v_ptr32()
self.UsualSize = v_uint32()
class PNP_DEVICE_ACTION_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.DeviceObject = v_ptr32()
self.RequestType = v_uint32()
self.ReorderingBarrier = v_uint8()
self._pad0014 = v_bytes(size=3)
self.RequestArgument = v_uint32()
self.CompletionEvent = v_ptr32()
self.CompletionStatus = v_ptr32()
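# OPEN_PACKET is (as far as public documentation goes) the I/O manager's internal
# create/open context carried through the object parse routine while a file handle
# is being opened.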
class OPEN_PACKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.FileObject = v_ptr32()
self.FinalStatus = v_uint32()
self.Information = v_uint32()
self.ParseCheck = v_uint32()
self.RelatedFileObject = v_ptr32()
self.OriginalAttributes = v_ptr32()
self._pad0020 = v_bytes(size=4)
self.AllocationSize = LARGE_INTEGER()
self.CreateOptions = v_uint32()
self.FileAttributes = v_uint16()
self.ShareAccess = v_uint16()
self.EaBuffer = v_ptr32()
self.EaLength = v_uint32()
self.Options = v_uint32()
self.Disposition = v_uint32()
self.BasicInformation = v_ptr32()
self.NetworkInformation = v_ptr32()
self.CreateFileType = v_uint32()
self.MailslotOrPipeParameters = v_ptr32()
self.Override = v_uint8()
self.QueryOnly = v_uint8()
self.DeleteOnly = v_uint8()
self.FullAttributes = v_uint8()
self.LocalFileObject = v_ptr32()
self.InternalFlags = v_uint32()
self.AccessMode = v_uint8()
self._pad0060 = v_bytes(size=3)
self.DriverCreateContext = IO_DRIVER_CREATE_CONTEXT()
class HANDLE_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VolatileLowValue = v_uint32()
self.HighValue = v_uint32()
class HEAP_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalMemoryReserved = v_uint32()
self.TotalMemoryCommitted = v_uint32()
self.TotalMemoryLargeUCR = v_uint32()
self.TotalSizeInVirtualBlocks = v_uint32()
self.TotalSegments = v_uint32()
self.TotalUCRs = v_uint32()
self.CommittOps = v_uint32()
self.DeCommitOps = v_uint32()
self.LockAcquires = v_uint32()
self.LockCollisions = v_uint32()
self.CommitRate = v_uint32()
self.DecommittRate = v_uint32()
self.CommitFailures = v_uint32()
self.InBlockCommitFailures = v_uint32()
self.PollIntervalCounter = v_uint32()
self.DecommitsSinceLastCheck = v_uint32()
self.HeapPollInterval = v_uint32()
self.AllocAndFreeOps = v_uint32()
self.AllocationIndicesActive = v_uint32()
self.InBlockDeccommits = v_uint32()
self.InBlockDeccomitSize = v_uint32()
self.HighWatermarkSize = v_uint32()
self.LastPolledSize = v_uint32()
class WHEA_MEMORY_ERROR_SECTION_VALIDBITS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ErrorStatus = v_uint64()
class BLOB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ResourceList = LIST_ENTRY()
self.u1 = _unnamed_30805()
self.ResourceId = v_uint8()
self.CachedReferences = v_uint16()
self.ReferenceCount = v_uint32()
self.Pad = v_uint32()
self.Lock = EX_PUSH_LOCK()
class WORK_QUEUE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkQueueLinks = LIST_ENTRY()
self.Parameters = _unnamed_30472()
self.Function = v_uint8()
self._pad0010 = v_bytes(size=3)
class PI_BUS_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.NumberCSNs = v_uint8()
self._pad0008 = v_bytes(size=3)
self.ReadDataPort = v_ptr32()
self.DataPortMapped = v_uint8()
self._pad0010 = v_bytes(size=3)
self.AddressPort = v_ptr32()
self.AddrPortMapped = v_uint8()
self._pad0018 = v_bytes(size=3)
self.CommandPort = v_ptr32()
self.CmdPortMapped = v_uint8()
self._pad0020 = v_bytes(size=3)
self.NextSlotNumber = v_uint32()
self.DeviceList = SINGLE_LIST_ENTRY()
self.CardList = SINGLE_LIST_ENTRY()
self.PhysicalBusDevice = v_ptr32()
self.FunctionalBusDevice = v_ptr32()
self.AttachedDevice = v_ptr32()
self.BusNumber = v_uint32()
self.SystemPowerState = v_uint32()
self.DevicePowerState = v_uint32()
class MAILSLOT_CREATE_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MailslotQuota = v_uint32()
self.MaximumMessageSize = v_uint32()
self.ReadTimeout = LARGE_INTEGER()
self.TimeoutSpecified = v_uint8()
self._pad0018 = v_bytes(size=7)
class _unnamed_28043(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InterfaceType = v_ptr32()
self.Size = v_uint16()
self.Version = v_uint16()
self.Interface = v_ptr32()
self.InterfaceSpecificData = v_ptr32()
class FS_FILTER_CALLBACK_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFsFilterCallbackData = v_uint32()
self.Operation = v_uint8()
self.Reserved = v_uint8()
self._pad0008 = v_bytes(size=2)
self.DeviceObject = v_ptr32()
self.FileObject = v_ptr32()
self.Parameters = FS_FILTER_PARAMETERS()
class PPM_IDLE_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DomainMembers = KAFFINITY_EX()
self.Latency = v_uint32()
self.Power = v_uint32()
self.StateFlags = v_uint32()
self.StateType = v_uint8()
self.InterruptsEnabled = v_uint8()
self.Interruptible = v_uint8()
self.ContextRetained = v_uint8()
self.CacheCoherent = v_uint8()
self._pad0020 = v_bytes(size=3)
class IO_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Option = v_uint8()
self.Type = v_uint8()
self.ShareDisposition = v_uint8()
self.Spare1 = v_uint8()
self.Flags = v_uint16()
self.Spare2 = v_uint16()
self.u = _unnamed_33989()
class ACCESS_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OperationID = LUID()
self.SecurityEvaluated = v_uint8()
self.GenerateAudit = v_uint8()
self.GenerateOnClose = v_uint8()
self.PrivilegesAllocated = v_uint8()
self.Flags = v_uint32()
self.RemainingDesiredAccess = v_uint32()
self.PreviouslyGrantedAccess = v_uint32()
self.OriginalDesiredAccess = v_uint32()
self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT()
self.SecurityDescriptor = v_ptr32()
self.AuxData = v_ptr32()
self.Privileges = _unnamed_27432()
self.AuditPrivileges = v_uint8()
self._pad0064 = v_bytes(size=3)
self.ObjectName = UNICODE_STRING()
self.ObjectTypeName = UNICODE_STRING()
class RTL_ATOM_TABLE_REFERENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowBoxList = LIST_ENTRY()
self.LowBoxID = v_uint32()
self.ReferenceCount = v_uint16()
self.Flags = v_uint16()
class DBGKD_SWITCH_PARTITION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Partition = v_uint32()
class TP_CALLBACK_INSTANCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_27809(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceQueueEntry = KDEVICE_QUEUE_ENTRY()
self.Thread = v_ptr32()
self.AuxiliaryBuffer = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.CurrentStackLocation = v_ptr32()
self.OriginalFileObject = v_ptr32()
self.IrpExtension = v_ptr32()
class PROC_IDLE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StateCount = v_uint32()
self.TotalTransitions = v_uint32()
self.ResetCount = v_uint32()
self.AbortCount = v_uint32()
self.StartTime = v_uint64()
self.PriorIdleTime = v_uint64()
self.TimeUnit = v_uint32()
self._pad0028 = v_bytes(size=4)
self.State = vstruct.VArray([ PROC_IDLE_STATE_ACCOUNTING() for i in xrange(1) ])
class _unnamed_32010(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.XpfMceDescriptor = WHEA_XPF_MCE_DESCRIPTOR()
self._pad03a4 = v_bytes(size=12)
class GDI_TEB_BATCH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.HDC = v_uint32()
self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])
class DBGKD_SET_SPECIAL_CALL32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpecialCall = v_uint32()
class STRING32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self.Buffer = v_uint32()
class DBGKD_LOAD_SYMBOLS32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PathNameLength = v_uint32()
self.BaseOfDll = v_uint32()
self.ProcessId = v_uint32()
self.CheckSum = v_uint32()
self.SizeOfImage = v_uint32()
self.UnloadSymbols = v_uint8()
self._pad0018 = v_bytes(size=3)
class tagSWITCH_CONTEXT_ATTRIBUTE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ulContextUpdateCounter = v_uint64()
self.fAllowContextUpdate = v_uint32()
self.fEnableTrace = v_uint32()
self.EtwHandle = v_uint64()
class DBGKM_EXCEPTION32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionRecord = EXCEPTION_RECORD32()
self.FirstChance = v_uint32()
class _unnamed_29100(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IoStatus = IO_STATUS_BLOCK()
class PAGEFAULT_HISTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class PNP_RESERVED_PROVIDER_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.DependentList = LIST_ENTRY()
self.ReservationId = UNICODE_STRING()
self.ReferenceCount = v_uint32()
class ECP_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Flags = v_uint32()
self.EcpList = LIST_ENTRY()
class ENODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Ncb = KNODE()
self.ExWorkerQueues = vstruct.VArray([ EX_WORK_QUEUE() for i in xrange(7) ])
self.ExpThreadSetManagerEvent = KEVENT()
self.ExpWorkerThreadBalanceManagerPtr = v_ptr32()
self.ExpWorkerSeed = v_uint32()
self.ExWorkerFullInit = v_uint32()
self._pad0280 = v_bytes(size=28)
class PROCESSOR_PERFSTATE_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint32()
self.MaxThrottle = v_uint8()
self.MinThrottle = v_uint8()
self.BusyAdjThreshold = v_uint8()
self.Spare = v_uint8()
self.TimeCheck = v_uint32()
self.IncreaseTime = v_uint32()
self.DecreaseTime = v_uint32()
self.IncreasePercent = v_uint32()
self.DecreasePercent = v_uint32()
class NT_TIB64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionList = v_uint64()
self.StackBase = v_uint64()
self.StackLimit = v_uint64()
self.SubSystemTib = v_uint64()
self.FiberData = v_uint64()
self.ArbitraryUserPointer = v_uint64()
self.Self = v_uint64()
class SECTION_OBJECT_POINTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSectionObject = v_ptr32()
self.SharedCacheMap = v_ptr32()
self.ImageSectionObject = v_ptr32()
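# MDL (Memory Descriptor List) header; the PFN array describing the locked
# physical pages follows this fixed-size header in memory.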
class MDL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Size = v_uint16()
self.MdlFlags = v_uint16()
self.Process = v_ptr32()
self.MappedSystemVa = v_ptr32()
self.StartVa = v_ptr32()
self.ByteCount = v_uint32()
self.ByteOffset = v_uint32()
class _unnamed_30472(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Read = _unnamed_30473()
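# KTRAP_FRAME holds the x86 register state captured on entry to the kernel
# (interrupts, exceptions and system calls).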
class KTRAP_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DbgEbp = v_uint32()
self.DbgEip = v_uint32()
self.DbgArgMark = v_uint32()
self.DbgArgPointer = v_uint32()
self.TempSegCs = v_uint16()
self.Logging = v_uint8()
self.FrameType = v_uint8()
self.TempEsp = v_uint32()
self.Dr0 = v_uint32()
self.Dr1 = v_uint32()
self.Dr2 = v_uint32()
self.Dr3 = v_uint32()
self.Dr6 = v_uint32()
self.Dr7 = v_uint32()
self.SegGs = v_uint32()
self.SegEs = v_uint32()
self.SegDs = v_uint32()
self.Edx = v_uint32()
self.Ecx = v_uint32()
self.Eax = v_uint32()
self.PreviousPreviousMode = v_uint8()
self.EntropyQueueDpc = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.ExceptionList = v_ptr32()
self.SegFs = v_uint32()
self.Edi = v_uint32()
self.Esi = v_uint32()
self.Ebx = v_uint32()
self.Ebp = v_uint32()
self.ErrCode = v_uint32()
self.Eip = v_uint32()
self.SegCs = v_uint32()
self.EFlags = v_uint32()
self.HardwareEsp = v_uint32()
self.HardwareSegSs = v_uint32()
self.V86Es = v_uint32()
self.V86Ds = v_uint32()
self.V86Fs = v_uint32()
self.V86Gs = v_uint32()
class _unnamed_34931(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TestAllocation = ARBITER_TEST_ALLOCATION_PARAMETERS()
self._pad0010 = v_bytes(size=4)
class _unnamed_30475(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SharedCacheMap = v_ptr32()
class CM_INDEX_HINT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.HashKey = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class _unnamed_30477(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Event = v_ptr32()
class PRIVATE_CACHE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self._pad0004 = v_bytes(size=2)
self.ReadAheadMask = v_uint32()
self.FileObject = v_ptr32()
self._pad0010 = v_bytes(size=4)
self.FileOffset1 = LARGE_INTEGER()
self.BeyondLastByte1 = LARGE_INTEGER()
self.FileOffset2 = LARGE_INTEGER()
self.BeyondLastByte2 = LARGE_INTEGER()
self.SequentialReadCount = v_uint32()
self.ReadAheadLength = v_uint32()
self.ReadAheadOffset = LARGE_INTEGER()
self.ReadAheadBeyondLastByte = LARGE_INTEGER()
self.PrevReadAheadBeyondLastByte = v_uint64()
self.ReadAheadSpinLock = v_uint32()
self.PipelinedReadAheadRequestSize = v_uint32()
self.ReadAheadGrowth = v_uint32()
self.PrivateLinks = LIST_ENTRY()
self.ReadAheadWorkItem = v_ptr32()
class MMPTE_SOFTWARE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class IO_TIMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.TimerFlag = v_uint16()
self.TimerList = LIST_ENTRY()
self.TimerRoutine = v_ptr32()
self.Context = v_ptr32()
self.DeviceObject = v_ptr32()
class MM_STORE_KEY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KeyLow = v_uint32()
class OBJECT_CREATE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Attributes = v_uint32()
self.RootDirectory = v_ptr32()
self.ProbeMode = v_uint8()
self._pad000c = v_bytes(size=3)
self.PagedPoolCharge = v_uint32()
self.NonPagedPoolCharge = v_uint32()
self.SecurityDescriptorCharge = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.SecurityQos = v_ptr32()
self.SecurityQualityOfService = SECURITY_QUALITY_OF_SERVICE()
class WHEA_REVISION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinorRevision = v_uint8()
self.MajorRevision = v_uint8()
class KSECONDARY_IDT_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpinLock = v_uint32()
self.ConnectLock = KEVENT()
self.LineMasked = v_uint8()
self._pad0018 = v_bytes(size=3)
self.InterruptList = v_ptr32()
class TP_CLEANUP_GROUP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class MM_SESSION_SPACE_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Initialized = v_uint32()
class CVDD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self._pad001c = v_bytes(size=24)
class EVENT_FILTER_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Id = v_uint16()
self.Version = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(5) ])
self.InstanceId = v_uint64()
self.Size = v_uint32()
self.NextOffset = v_uint32()
class PROC_IDLE_SNAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Time = v_uint64()
self.Idle = v_uint64()
class POP_FX_DRIVER_CALLBACKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ComponentActive = v_ptr32()
self.ComponentIdle = v_ptr32()
self.ComponentIdleState = v_ptr32()
self.DevicePowerRequired = v_ptr32()
self.DevicePowerNotRequired = v_ptr32()
self.PowerControl = v_ptr32()
self.ComponentCriticalTransition = v_ptr32()
class PO_NOTIFY_ORDER_LEVEL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceCount = v_uint32()
self.ActiveCount = v_uint32()
self.WaitSleep = LIST_ENTRY()
self.ReadySleep = LIST_ENTRY()
self.ReadyS0 = LIST_ENTRY()
self.WaitS0 = LIST_ENTRY()
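# Absolute-format SECURITY_DESCRIPTOR: Owner/Group/Sacl/Dacl are pointers here,
# rather than the offsets used by the self-relative format.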
class SECURITY_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint8()
self.Sbz1 = v_uint8()
self.Control = v_uint16()
self.Owner = v_ptr32()
self.Group = v_ptr32()
self.Sacl = v_ptr32()
self.Dacl = v_ptr32()
class PCW_PROCESSOR_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdleTime = v_uint64()
self.AvailableTime = v_uint64()
self.UserTime = v_uint64()
self.KernelTime = v_uint64()
self.Interrupts = v_uint32()
self._pad0028 = v_bytes(size=4)
self.DpcTime = v_uint64()
self.InterruptTime = v_uint64()
self.ClockInterrupts = v_uint32()
self.DpcCount = v_uint32()
self.DpcRate = v_uint32()
self._pad0048 = v_bytes(size=4)
self.C1Time = v_uint64()
self.C2Time = v_uint64()
self.C3Time = v_uint64()
self.C1Transitions = v_uint64()
self.C2Transitions = v_uint64()
self.C3Transitions = v_uint64()
self.ParkingStatus = v_uint32()
self.CurrentFrequency = v_uint32()
self.PercentMaxFrequency = v_uint32()
self.StateFlags = v_uint32()
self.NominalThroughput = v_uint32()
self.ActiveThroughput = v_uint32()
self.ScaledThroughput = v_uint64()
self.ScaledKernelThroughput = v_uint64()
self.AverageIdleTime = v_uint64()
self.IdleBreakEvents = v_uint64()
self.PerformanceLimit = v_uint32()
self.PerformanceLimitFlags = v_uint32()
class OBJECT_TYPE_INITIALIZER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.ObjectTypeFlags = v_uint8()
self._pad0004 = v_bytes(size=1)
self.ObjectTypeCode = v_uint32()
self.InvalidAttributes = v_uint32()
self.GenericMapping = GENERIC_MAPPING()
self.ValidAccessMask = v_uint32()
self.RetainAccess = v_uint32()
self.PoolType = v_uint32()
self.DefaultPagedPoolCharge = v_uint32()
self.DefaultNonPagedPoolCharge = v_uint32()
self.DumpProcedure = v_ptr32()
self.OpenProcedure = v_ptr32()
self.CloseProcedure = v_ptr32()
self.DeleteProcedure = v_ptr32()
self.ParseProcedure = v_ptr32()
self.SecurityProcedure = v_ptr32()
self.QueryNameProcedure = v_ptr32()
self.OkayToCloseProcedure = v_ptr32()
self.WaitObjectFlagMask = v_uint32()
self.WaitObjectFlagOffset = v_uint16()
self.WaitObjectPointerOffset = v_uint16()
class VACB_LEVEL_REFERENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reference = v_uint32()
self.SpecialReference = v_uint32()
class XSTATE_SAVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved1 = v_uint64()
self.Reserved2 = v_uint32()
self.Prev = v_ptr32()
self.Reserved3 = v_ptr32()
self.Thread = v_ptr32()
self.Reserved4 = v_ptr32()
self.Level = v_uint8()
self._pad0020 = v_bytes(size=3)
class PTE_TRACKER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.Mdl = v_ptr32()
self.Count = v_uint32()
self.SystemVa = v_ptr32()
self.StartVa = v_ptr32()
self.Offset = v_uint32()
self.Length = v_uint32()
self.Page = v_uint32()
self.IoMapping = v_uint32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(7) ])
class HEAP_ENTRY_EXTRA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatorBackTraceIndex = v_uint16()
self.TagIndex = v_uint16()
self.Settable = v_uint32()
class _unnamed_27861(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.Reserved = v_uint16()
self.ShareAccess = v_uint16()
self.Parameters = v_ptr32()
class _unnamed_34023(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserData = v_uint32()
class HEAP_PSEUDO_TAG_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Allocs = v_uint32()
self.Frees = v_uint32()
self.Size = v_uint32()
class _unnamed_34026(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u = _unnamed_34023()
class CM_KEY_REFERENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KeyCell = v_uint32()
self.KeyHive = v_ptr32()
class MMSECTION_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BeingDeleted = v_uint32()
class MI_SPECIAL_POOL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self._pad0008 = v_bytes(size=4)
self.Paged = MI_PTE_CHAIN_HEAD()
self.NonPaged = MI_PTE_CHAIN_HEAD()
self.PagesInUse = v_uint32()
self.SpecialPoolPdes = RTL_BITMAP()
self._pad0048 = v_bytes(size=4)
class DBGKD_GET_INTERNAL_BREAKPOINT64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakpointAddress = v_uint64()
self.Flags = v_uint32()
self.Calls = v_uint32()
self.MaxCallsPerPeriod = v_uint32()
self.MinInstructions = v_uint32()
self.MaxInstructions = v_uint32()
self.TotalInstructions = v_uint32()
class CONTROL_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Segment = v_ptr32()
self.ListHead = LIST_ENTRY()
self.NumberOfSectionReferences = v_uint32()
self.NumberOfPfnReferences = v_uint32()
self.NumberOfMappedViews = v_uint32()
self.NumberOfUserReferences = v_uint32()
self.u = _unnamed_28971()
self.FlushInProgressCount = v_uint32()
self.FilePointer = EX_FAST_REF()
self.ControlAreaLock = v_uint32()
self.ModifiedWriteCount = v_uint32()
self.WaitList = v_ptr32()
self.u2 = _unnamed_28974()
self.LockedPages = v_uint64()
class MODWRITER_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KeepForever = v_uint32()
class _unnamed_35375(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EndingOffset = v_ptr32()
self.ResourceToRelease = v_ptr32()
class _unnamed_35376(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ResourceToRelease = v_ptr32()
class CM_TRANS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TransactionListEntry = LIST_ENTRY()
self.KCBUoWListHead = LIST_ENTRY()
self.LazyCommitListEntry = LIST_ENTRY()
self.KtmTrans = v_ptr32()
self.CmRm = v_ptr32()
self.KtmEnlistmentObject = v_ptr32()
self.KtmEnlistmentHandle = v_ptr32()
self.KtmUow = GUID()
self.StartLsn = v_uint64()
self.TransState = v_uint32()
self.HiveCount = v_uint32()
self.HiveArray = vstruct.VArray([ v_ptr32() for i in xrange(7) ])
self._pad0068 = v_bytes(size=4)
class POP_POWER_ACTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Updates = v_uint8()
self.State = v_uint8()
self.Shutdown = v_uint8()
self._pad0004 = v_bytes(size=1)
self.Action = v_uint32()
self.LightestState = v_uint32()
self.Flags = v_uint32()
self.Status = v_uint32()
self.DeviceType = v_uint32()
self.DeviceTypeFlags = v_uint32()
self.IrpMinor = v_uint8()
self.Waking = v_uint8()
self._pad0020 = v_bytes(size=2)
self.SystemState = v_uint32()
self.NextSystemState = v_uint32()
self.EffectiveSystemState = v_uint32()
self.CurrentSystemState = v_uint32()
self.ShutdownBugCode = v_ptr32()
self.DevState = v_ptr32()
self.HiberContext = v_ptr32()
self._pad0040 = v_bytes(size=4)
self.WakeTime = v_uint64()
self.SleepTime = v_uint64()
self.WakeAlarmSignaled = v_uint32()
self._pad0058 = v_bytes(size=4)
self.WakeAlarm = vstruct.VArray([ _unnamed_35222() for i in xrange(3) ])
self.FilteredCapabilities = SYSTEM_POWER_CAPABILITIES()
self._pad00d8 = v_bytes(size=4)
class _unnamed_35379(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Argument1 = v_ptr32()
self.Argument2 = v_ptr32()
self.Argument3 = v_ptr32()
self.Argument4 = v_ptr32()
self.Argument5 = v_ptr32()
class EPROCESS_VALUES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KernelTime = v_uint64()
self.UserTime = v_uint64()
self.CycleTime = v_uint64()
self.ContextSwitches = v_uint64()
self.ReadOperationCount = v_uint64()
self.WriteOperationCount = v_uint64()
self.OtherOperationCount = v_uint64()
self.ReadTransferCount = v_uint64()
self.WriteTransferCount = v_uint64()
self.OtherTransferCount = v_uint64()
class OBJECT_HEADER_CREATOR_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TypeList = LIST_ENTRY()
self.CreatorUniqueProcess = v_ptr32()
self.CreatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
class PAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = FAST_MUTEX()
self._pad00c0 = v_bytes(size=32)
class MBCB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeIsInZone = v_uint16()
self.PagesToWrite = v_uint32()
self.DirtyPages = v_uint32()
self.Reserved = v_uint32()
self.BitmapRanges = LIST_ENTRY()
self.ResumeWritePage = v_uint64()
self.MostRecentlyDirtiedPage = v_uint64()
self.BitmapRange1 = BITMAP_RANGE()
self.BitmapRange2 = BITMAP_RANGE()
self.BitmapRange3 = BITMAP_RANGE()
class PROCESS_DISK_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BytesRead = v_uint64()
self.BytesWritten = v_uint64()
self.ReadOperationCount = v_uint64()
self.WriteOperationCount = v_uint64()
self.FlushOperationCount = v_uint64()
class RTL_BITMAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfBitMap = v_uint32()
self.Buffer = v_ptr32()
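# LARGE_INTEGER models a 64-bit value as LowPart/HighPart on 32-bit builds.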
class LARGE_INTEGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class IA64_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Continue = v_uint32()
self.CurrentSymbolStart = v_uint64()
self.CurrentSymbolEnd = v_uint64()
class PCW_REGISTRATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_27868(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Key = v_uint32()
self.ByteOffset = LARGE_INTEGER()
class HBIN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.FileOffset = v_uint32()
self.Size = v_uint32()
self.Reserved1 = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.TimeStamp = LARGE_INTEGER()
self.Spare = v_uint32()
class _unnamed_28130(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InPath = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.Type = v_uint32()
class WHEA_AER_ENDPOINT_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
self.BusNumber = v_uint32()
self.Slot = WHEA_PCI_SLOT_NUMBER()
self.DeviceControl = v_uint16()
self.Flags = AER_ENDPOINT_DESCRIPTOR_FLAGS()
self.UncorrectableErrorMask = v_uint32()
self.UncorrectableErrorSeverity = v_uint32()
self.CorrectableErrorMask = v_uint32()
self.AdvancedCapsAndControl = v_uint32()
class NPAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = v_uint32()
self._pad00c0 = v_bytes(size=60)
class LEARNING_MODE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Settings = v_uint32()
self.Enabled = v_uint8()
self.PermissiveModeEnabled = v_uint8()
self._pad0008 = v_bytes(size=2)
class RTL_DYNAMIC_HASH_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.Shift = v_uint32()
self.TableSize = v_uint32()
self.Pivot = v_uint32()
self.DivisorMask = v_uint32()
self.NumEntries = v_uint32()
self.NonEmptyBuckets = v_uint32()
self.NumEnumerators = v_uint32()
self.Directory = v_ptr32()
class BITMAP_RANGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Links = LIST_ENTRY()
self.BasePage = v_uint64()
self.FirstDirtyPage = v_uint32()
self.LastDirtyPage = v_uint32()
self.DirtyPages = v_uint32()
self.Bitmap = v_ptr32()
class ETW_REG_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RegList = LIST_ENTRY()
self.GuidEntry = v_ptr32()
self.ReplyQueue = v_ptr32()
self.SessionId = v_uint32()
self._pad001c = v_bytes(size=8)
self.Process = v_ptr32()
self.Callback = v_ptr32()
self.Index = v_uint16()
self.Flags = v_uint8()
self.EnableMask = v_uint8()
class PLATFORM_IDLE_STATE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CancelCount = v_uint32()
self.FailureCount = v_uint32()
self.SuccessCount = v_uint32()
self._pad0010 = v_bytes(size=4)
self.MaxTime = v_uint64()
self.MinTime = v_uint64()
self.TotalTime = v_uint64()
self.InvalidBucketIndex = v_uint32()
self._pad0030 = v_bytes(size=4)
self.IdleTimeBuckets = vstruct.VArray([ PROC_IDLE_STATE_BUCKET() for i in xrange(26) ])
class KLOCK_QUEUE_HANDLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LockQueue = KSPIN_LOCK_QUEUE()
self.OldIrql = v_uint8()
self._pad000c = v_bytes(size=3)
class TRACE_LOGFILE_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BufferSize = v_uint32()
self.Version = v_uint32()
self.ProviderVersion = v_uint32()
self.NumberOfProcessors = v_uint32()
self.EndTime = LARGE_INTEGER()
self.TimerResolution = v_uint32()
self.MaximumFileSize = v_uint32()
self.LogFileMode = v_uint32()
self.BuffersWritten = v_uint32()
self.LogInstanceGuid = GUID()
self.LoggerName = v_ptr32()
self.LogFileName = v_ptr32()
self.TimeZone = RTL_TIME_ZONE_INFORMATION()
self._pad00f0 = v_bytes(size=4)
self.BootTime = LARGE_INTEGER()
self.PerfFreq = LARGE_INTEGER()
self.StartTime = LARGE_INTEGER()
self.ReservedFlags = v_uint32()
self.BuffersLost = v_uint32()
class CLIENT_ID32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UniqueProcess = v_uint32()
self.UniqueThread = v_uint32()
class CLS_LSN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.offset = _unnamed_36524()
class _unnamed_27432(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InitialPrivilegeSet = INITIAL_PRIVILEGE_SET()
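# VPB (Volume Parameter Block) ties a mounted file system's volume device object
# to the underlying storage device object.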
class VPB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.Flags = v_uint16()
self.VolumeLabelLength = v_uint16()
self.DeviceObject = v_ptr32()
self.RealDevice = v_ptr32()
self.SerialNumber = v_uint32()
self.ReferenceCount = v_uint32()
self.VolumeLabel = vstruct.VArray([ v_uint16() for i in xrange(32) ])
class _unnamed_34115(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.MinBusNumber = v_uint32()
self.MaxBusNumber = v_uint32()
self.Reserved = v_uint32()
class WHEAP_ERROR_SOURCE_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Count = v_uint32()
self.Items = LIST_ENTRY()
self.InsertLock = KEVENT()
class OBP_LOOKUP_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Directory = v_ptr32()
self.Object = v_ptr32()
self.EntryLink = v_ptr32()
self.HashValue = v_uint32()
self.HashIndex = v_uint16()
self.DirectoryLocked = v_uint8()
self.LockedExclusive = v_uint8()
self.LockStateSignature = v_uint32()
class OB_DUPLICATE_OBJECT_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SourceProcess = v_ptr32()
self.SourceHandle = v_ptr32()
self.Object = v_ptr32()
self.TargetAccess = v_uint32()
self.ObjectInfo = HANDLE_TABLE_ENTRY_INFO()
self.HandleAttributes = v_uint32()
class PP_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.P = v_ptr32()
self.L = v_ptr32()
class SEP_LOGON_SESSION_REFERENCES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.LogonId = LUID()
self.BuddyLogonId = LUID()
self.ReferenceCount = v_uint32()
self.Flags = v_uint32()
self.pDeviceMap = v_ptr32()
self.Token = v_ptr32()
self.AccountName = UNICODE_STRING()
self.AuthorityName = UNICODE_STRING()
self.LowBoxHandlesTable = SEP_LOWBOX_HANDLES_TABLE()
class JOBOBJECT_WAKE_FILTER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HighEdgeFilter = v_uint32()
self.LowEdgeFilter = v_uint32()
class _unnamed_29797(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DockStatus = v_uint32()
self.ListEntry = LIST_ENTRY()
self.SerialNumber = v_ptr32()
class MMPTE_TIMESTAMP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MustBeZero = v_uint64()
class OBJECT_NAME_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Name = UNICODE_STRING()
class OBJECT_HEADER_PROCESS_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExclusiveProcess = v_ptr32()
self.Reserved = v_uint32()
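# KUSER_SHARED_DATA is the page the kernel maps read-only into every user-mode
# process (at 0x7FFE0000 on x86) for cheap access to tick counts, time and flags.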
class KUSER_SHARED_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TickCountLowDeprecated = v_uint32()
self.TickCountMultiplier = v_uint32()
self.InterruptTime = KSYSTEM_TIME()
self.SystemTime = KSYSTEM_TIME()
self.TimeZoneBias = KSYSTEM_TIME()
self.ImageNumberLow = v_uint16()
self.ImageNumberHigh = v_uint16()
self.NtSystemRoot = vstruct.VArray([ v_uint16() for i in xrange(260) ])
self.MaxStackTraceDepth = v_uint32()
self.CryptoExponent = v_uint32()
self.TimeZoneId = v_uint32()
self.LargePageMinimum = v_uint32()
self.AitSamplingValue = v_uint32()
self.AppCompatFlag = v_uint32()
self.RNGSeedVersion = v_uint64()
self.GlobalValidationRunlevel = v_uint32()
self.TimeZoneBiasStamp = v_uint32()
self.Reserved2 = v_uint32()
self.NtProductType = v_uint32()
self.ProductTypeIsValid = v_uint8()
self.Reserved0 = vstruct.VArray([ v_uint8() for i in xrange(1) ])
self.NativeProcessorArchitecture = v_uint16()
self.NtMajorVersion = v_uint32()
self.NtMinorVersion = v_uint32()
self.ProcessorFeatures = vstruct.VArray([ v_uint8() for i in xrange(64) ])
self.Reserved1 = v_uint32()
self.Reserved3 = v_uint32()
self.TimeSlip = v_uint32()
self.AlternativeArchitecture = v_uint32()
self.AltArchitecturePad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.SystemExpirationDate = LARGE_INTEGER()
self.SuiteMask = v_uint32()
self.KdDebuggerEnabled = v_uint8()
self.MitigationPolicies = v_uint8()
self.Reserved6 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.ActiveConsoleId = v_uint32()
self.DismountCount = v_uint32()
self.ComPlusPackage = v_uint32()
self.LastSystemRITEventTickCount = v_uint32()
self.NumberOfPhysicalPages = v_uint32()
self.SafeBootMode = v_uint8()
self.Reserved12 = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.SharedDataFlags = v_uint32()
self.DataFlagsPad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.TestRetInstruction = v_uint64()
self.QpcFrequency = v_uint64()
self.SystemCallPad = vstruct.VArray([ v_uint64() for i in xrange(3) ])
self.TickCount = KSYSTEM_TIME()
self.TickCountPad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.Cookie = v_uint32()
self.CookiePad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.ConsoleSessionForegroundProcessId = v_uint64()
self.TimeUpdateSequence = v_uint64()
self.BaselineSystemTimeQpc = v_uint64()
self.BaselineInterruptTimeQpc = v_uint64()
self.QpcSystemTimeIncrement = v_uint64()
self.QpcInterruptTimeIncrement = v_uint64()
self.QpcSystemTimeIncrement32 = v_uint32()
self.QpcInterruptTimeIncrement32 = v_uint32()
self.QpcSystemTimeIncrementShift = v_uint8()
self.QpcInterruptTimeIncrementShift = v_uint8()
self.Reserved8 = vstruct.VArray([ v_uint8() for i in xrange(14) ])
self.UserModeGlobalLogger = vstruct.VArray([ v_uint16() for i in xrange(16) ])
self.ImageFileExecutionOptions = v_uint32()
self.LangGenerationCount = v_uint32()
self.Reserved4 = v_uint64()
self.InterruptTimeBias = v_uint64()
self.TscQpcBias = v_uint64()
self.ActiveProcessorCount = v_uint32()
self.ActiveGroupCount = v_uint8()
self.Reserved9 = v_uint8()
self.TscQpcData = v_uint16()
self.TimeZoneBiasEffectiveStart = LARGE_INTEGER()
self.TimeZoneBiasEffectiveEnd = LARGE_INTEGER()
self.XState = XSTATE_CONFIGURATION()
class SYSTEM_POWER_STATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved1 = v_uint32()
class SYNCH_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpinLockAcquireCount = v_uint32()
self.SpinLockContentionCount = v_uint32()
self.SpinLockSpinCount = v_uint32()
self.IpiSendRequestBroadcastCount = v_uint32()
self.IpiSendRequestRoutineCount = v_uint32()
self.IpiSendSoftwareInterruptCount = v_uint32()
self.ExInitializeResourceCount = v_uint32()
self.ExReInitializeResourceCount = v_uint32()
self.ExDeleteResourceCount = v_uint32()
self.ExecutiveResourceAcquiresCount = v_uint32()
self.ExecutiveResourceContentionsCount = v_uint32()
self.ExecutiveResourceReleaseExclusiveCount = v_uint32()
self.ExecutiveResourceReleaseSharedCount = v_uint32()
self.ExecutiveResourceConvertsCount = v_uint32()
self.ExAcqResExclusiveAttempts = v_uint32()
self.ExAcqResExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResExclusiveAcquiresExclusiveRecursive = v_uint32()
self.ExAcqResExclusiveWaits = v_uint32()
self.ExAcqResExclusiveNotAcquires = v_uint32()
self.ExAcqResSharedAttempts = v_uint32()
self.ExAcqResSharedAcquiresExclusive = v_uint32()
self.ExAcqResSharedAcquiresShared = v_uint32()
self.ExAcqResSharedAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedWaits = v_uint32()
self.ExAcqResSharedNotAcquires = v_uint32()
self.ExAcqResSharedStarveExclusiveAttempts = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresShared = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedStarveExclusiveWaits = v_uint32()
self.ExAcqResSharedStarveExclusiveNotAcquires = v_uint32()
self.ExAcqResSharedWaitForExclusiveAttempts = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresShared = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedWaitForExclusiveWaits = v_uint32()
self.ExAcqResSharedWaitForExclusiveNotAcquires = v_uint32()
self.ExSetResOwnerPointerExclusive = v_uint32()
self.ExSetResOwnerPointerSharedNew = v_uint32()
self.ExSetResOwnerPointerSharedOld = v_uint32()
self.ExTryToAcqExclusiveAttempts = v_uint32()
self.ExTryToAcqExclusiveAcquires = v_uint32()
self.ExBoostExclusiveOwner = v_uint32()
self.ExBoostSharedOwners = v_uint32()
self.ExEtwSynchTrackingNotificationsCount = v_uint32()
self.ExEtwSynchTrackingNotificationsAccountedCount = v_uint32()
class KTM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.cookie = v_uint32()
self.Mutex = KMUTANT()
self.State = v_uint32()
self.NamespaceLink = KTMOBJECT_NAMESPACE_LINK()
self.TmIdentity = GUID()
self.Flags = v_uint32()
self.VolatileFlags = v_uint32()
self.LogFileName = UNICODE_STRING()
self.LogFileObject = v_ptr32()
self.MarshallingContext = v_ptr32()
self.LogManagementContext = v_ptr32()
self.Transactions = KTMOBJECT_NAMESPACE()
self.ResourceManagers = KTMOBJECT_NAMESPACE()
self.LsnOrderedMutex = KMUTANT()
self.LsnOrderedList = LIST_ENTRY()
self.CommitVirtualClock = LARGE_INTEGER()
self.CommitVirtualClockMutex = FAST_MUTEX()
self.BaseLsn = CLS_LSN()
self.CurrentReadLsn = CLS_LSN()
self.LastRecoveredLsn = CLS_LSN()
self.TmRmHandle = v_ptr32()
self.TmRm = v_ptr32()
self.LogFullNotifyEvent = KEVENT()
self.CheckpointWorkItem = WORK_QUEUE_ITEM()
self.CheckpointTargetLsn = CLS_LSN()
self.LogFullCompletedWorkItem = WORK_QUEUE_ITEM()
self.LogWriteResource = ERESOURCE()
self.LogFlags = v_uint32()
self.LogFullStatus = v_uint32()
self.RecoveryStatus = v_uint32()
self._pad0218 = v_bytes(size=4)
self.LastCheckBaseLsn = CLS_LSN()
self.RestartOrderedList = LIST_ENTRY()
self.OfflineWorkItem = WORK_QUEUE_ITEM()
class PRIVATE_CACHE_MAP_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DontUse = v_uint32()
class VF_TARGET_VERIFIED_DRIVER_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SuspectDriverEntry = v_ptr32()
self.WMICallback = v_ptr32()
self.EtwHandlesListHead = LIST_ENTRY()
self.u1 = _unnamed_34363()
self.Signature = v_uint32()
self.PoolPageHeaders = SLIST_HEADER()
self.PoolTrackers = SLIST_HEADER()
self.CurrentPagedPoolAllocations = v_uint32()
self.CurrentNonPagedPoolAllocations = v_uint32()
self.PeakPagedPoolAllocations = v_uint32()
self.PeakNonPagedPoolAllocations = v_uint32()
self.PagedBytes = v_uint32()
self.NonPagedBytes = v_uint32()
self.PeakPagedBytes = v_uint32()
self.PeakNonPagedBytes = v_uint32()
self.RaiseIrqls = v_uint32()
self.AcquireSpinLocks = v_uint32()
self.SynchronizeExecutions = v_uint32()
self.AllocationsWithNoTag = v_uint32()
self.AllocationsFailed = v_uint32()
self.AllocationsFailedDeliberately = v_uint32()
self.LockedBytes = v_uint32()
self.PeakLockedBytes = v_uint32()
self.MappedLockedBytes = v_uint32()
self.PeakMappedLockedBytes = v_uint32()
self.MappedIoSpaceBytes = v_uint32()
self.PeakMappedIoSpaceBytes = v_uint32()
self.PagesForMdlBytes = v_uint32()
self.PeakPagesForMdlBytes = v_uint32()
self.ContiguousMemoryBytes = v_uint32()
self.PeakContiguousMemoryBytes = v_uint32()
self.ContiguousMemoryListHead = LIST_ENTRY()
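# TEB64 is the 64-bit Thread Environment Block layout; its presence in this
# otherwise 32-bit structure set is presumably for inspecting WOW64 processes.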
class TEB64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB64()
self.EnvironmentPointer = v_uint64()
self.ClientId = CLIENT_ID64()
self.ActiveRpcHandle = v_uint64()
self.ThreadLocalStoragePointer = v_uint64()
self.ProcessEnvironmentBlock = v_uint64()
self.LastErrorValue = v_uint32()
self.CountOfOwnedCriticalSections = v_uint32()
self.CsrClientThread = v_uint64()
self.Win32ThreadInfo = v_uint64()
self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
self._pad0100 = v_bytes(size=4)
self.WOW32Reserved = v_uint64()
self.CurrentLocale = v_uint32()
self.FpSoftwareStatusRegister = v_uint32()
self.SystemReserved1 = vstruct.VArray([ v_uint64() for i in xrange(54) ])
self.ExceptionCode = v_uint32()
self._pad02c8 = v_bytes(size=4)
self.ActivationContextStackPointer = v_uint64()
self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(24) ])
self.TxFsContext = v_uint32()
self._pad02f0 = v_bytes(size=4)
self.GdiTebBatch = GDI_TEB_BATCH64()
self.RealClientId = CLIENT_ID64()
self.GdiCachedProcessHandle = v_uint64()
self.GdiClientPID = v_uint32()
self.GdiClientTID = v_uint32()
self.GdiThreadLocalInfo = v_uint64()
self.Win32ClientInfo = vstruct.VArray([ v_uint64() for i in xrange(62) ])
self.glDispatchTable = vstruct.VArray([ v_uint64() for i in xrange(233) ])
self.glReserved1 = vstruct.VArray([ v_uint64() for i in xrange(29) ])
self.glReserved2 = v_uint64()
self.glSectionInfo = v_uint64()
self.glSection = v_uint64()
self.glTable = v_uint64()
self.glCurrentRC = v_uint64()
self.glContext = v_uint64()
self.LastStatusValue = v_uint32()
self._pad1258 = v_bytes(size=4)
self.StaticUnicodeString = STRING64()
self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
self._pad1478 = v_bytes(size=6)
self.DeallocationStack = v_uint64()
self.TlsSlots = vstruct.VArray([ v_uint64() for i in xrange(64) ])
self.TlsLinks = LIST_ENTRY64()
self.Vdm = v_uint64()
self.ReservedForNtRpc = v_uint64()
self.DbgSsReserved = vstruct.VArray([ v_uint64() for i in xrange(2) ])
self.HardErrorMode = v_uint32()
self._pad16b8 = v_bytes(size=4)
self.Instrumentation = vstruct.VArray([ v_uint64() for i in xrange(11) ])
self.ActivityId = GUID()
self.SubProcessTag = v_uint64()
self.PerflibData = v_uint64()
self.EtwTraceData = v_uint64()
self.WinSockData = v_uint64()
self.GdiBatchCount = v_uint32()
self.CurrentIdealProcessor = PROCESSOR_NUMBER()
self.GuaranteedStackBytes = v_uint32()
self._pad1750 = v_bytes(size=4)
self.ReservedForPerf = v_uint64()
self.ReservedForOle = v_uint64()
self.WaitingOnLoaderLock = v_uint32()
self._pad1768 = v_bytes(size=4)
self.SavedPriorityState = v_uint64()
self.ReservedForCodeCoverage = v_uint64()
self.ThreadPoolData = v_uint64()
self.TlsExpansionSlots = v_uint64()
self.DeallocationBStore = v_uint64()
self.BStoreLimit = v_uint64()
self.MuiGeneration = v_uint32()
self.IsImpersonating = v_uint32()
self.NlsCache = v_uint64()
self.pShimData = v_uint64()
self.HeapVirtualAffinity = v_uint16()
self.LowFragHeapDataSlot = v_uint16()
self._pad17b8 = v_bytes(size=4)
self.CurrentTransactionHandle = v_uint64()
self.ActiveFrame = v_uint64()
self.FlsData = v_uint64()
self.PreferredLanguages = v_uint64()
self.UserPrefLanguages = v_uint64()
self.MergedPrefLanguages = v_uint64()
self.MuiImpersonation = v_uint32()
self.CrossTebFlags = v_uint16()
self.SameTebFlags = v_uint16()
self.TxnScopeEnterCallback = v_uint64()
self.TxnScopeExitCallback = v_uint64()
self.TxnScopeContext = v_uint64()
self.LockCount = v_uint32()
self.SpareUlong0 = v_uint32()
self.ResourceRetValue = v_uint64()
self.ReservedForWdf = v_uint64()
class HANDLE_TRACE_DEBUG_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RefCount = v_uint32()
self.TableSize = v_uint32()
self.BitMaskFlags = v_uint32()
self.CloseCompactionLock = FAST_MUTEX()
self.CurrentStackIndex = v_uint32()
self.TraceDb = vstruct.VArray([ HANDLE_TRACE_DB_ENTRY() for i in xrange(1) ])
class HCELL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.u = _unnamed_29247()
class CM_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.List = vstruct.VArray([ CM_FULL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class WNF_STATE_NAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = vstruct.VArray([ v_uint32() for i in xrange(2) ])
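# EPROCESS is the executive process object; this 32-bit layout (note the v_ptr32
# fields and WNF/low-box members) appears to come from a Windows 8.x-era ntoskrnl.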
class EPROCESS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Pcb = KPROCESS()
self.ProcessLock = EX_PUSH_LOCK()
self._pad00a8 = v_bytes(size=4)
self.CreateTime = LARGE_INTEGER()
self.RundownProtect = EX_RUNDOWN_REF()
self.UniqueProcessId = v_ptr32()
self.ActiveProcessLinks = LIST_ENTRY()
self.Flags2 = v_uint32()
self.Flags = v_uint32()
self.ProcessQuotaUsage = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.ProcessQuotaPeak = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.PeakVirtualSize = v_uint32()
self.VirtualSize = v_uint32()
self.SessionProcessLinks = LIST_ENTRY()
self.ExceptionPortData = v_ptr32()
self.Token = EX_FAST_REF()
self.WorkingSetPage = v_uint32()
self.AddressCreationLock = EX_PUSH_LOCK()
self.RotateInProgress = v_ptr32()
self.ForkInProgress = v_ptr32()
self.HardwareTrigger = v_uint32()
self.CommitChargeJob = v_ptr32()
self.CloneRoot = v_ptr32()
self.NumberOfPrivatePages = v_uint32()
self.NumberOfLockedPages = v_uint32()
self.Win32Process = v_ptr32()
self.Job = v_ptr32()
self.SectionObject = v_ptr32()
self.SectionBaseAddress = v_ptr32()
self.Cookie = v_uint32()
self.VdmObjects = v_ptr32()
self.WorkingSetWatch = v_ptr32()
self.Win32WindowStation = v_ptr32()
self.InheritedFromUniqueProcessId = v_ptr32()
self.LdtInformation = v_ptr32()
self.CreatorProcess = v_ptr32()
self.Peb = v_ptr32()
self.Session = v_ptr32()
self.AweInfo = v_ptr32()
self.QuotaBlock = v_ptr32()
self.ObjectTable = v_ptr32()
self.DebugPort = v_ptr32()
self.PaeTop = v_ptr32()
self.DeviceMap = v_ptr32()
self.EtwDataSource = v_ptr32()
self._pad0168 = v_bytes(size=4)
self.PageDirectoryPte = v_uint64()
self.ImageFileName = vstruct.VArray([ v_uint8() for i in xrange(15) ])
self.PriorityClass = v_uint8()
self.SecurityPort = v_ptr32()
self.SeAuditProcessCreationInfo = SE_AUDIT_PROCESS_CREATION_INFO()
self.JobLinks = LIST_ENTRY()
self.HighestUserAddress = v_ptr32()
self.ThreadListHead = LIST_ENTRY()
self.ActiveThreads = v_uint32()
self.ImagePathHash = v_uint32()
self.DefaultHardErrorProcessing = v_uint32()
self.LastThreadExitStatus = v_uint32()
self.PrefetchTrace = EX_FAST_REF()
self.LockedPagesList = v_ptr32()
self._pad01b8 = v_bytes(size=4)
self.ReadOperationCount = LARGE_INTEGER()
self.WriteOperationCount = LARGE_INTEGER()
self.OtherOperationCount = LARGE_INTEGER()
self.ReadTransferCount = LARGE_INTEGER()
self.WriteTransferCount = LARGE_INTEGER()
self.OtherTransferCount = LARGE_INTEGER()
self.CommitChargeLimit = v_uint32()
self.CommitCharge = v_uint32()
self.CommitChargePeak = v_uint32()
self.Vm = MMSUPPORT()
self.MmProcessLinks = LIST_ENTRY()
self.ModifiedPageCount = v_uint32()
self.ExitStatus = v_uint32()
self.VadRoot = MM_AVL_TABLE()
self.VadPhysicalPages = v_uint32()
self.VadPhysicalPagesLimit = v_uint32()
self.AlpcContext = ALPC_PROCESS_CONTEXT()
self.TimerResolutionLink = LIST_ENTRY()
self.TimerResolutionStackRecord = v_ptr32()
self.RequestedTimerResolution = v_uint32()
self.SmallestTimerResolution = v_uint32()
self.ExitTime = LARGE_INTEGER()
self.ActiveThreadsHighWatermark = v_uint32()
self.LargePrivateVadCount = v_uint32()
self.ThreadListLock = EX_PUSH_LOCK()
self.WnfContext = v_ptr32()
self.SectionMappingSize = v_uint32()
self.SignatureLevel = v_uint8()
self.SectionSignatureLevel = v_uint8()
self.SpareByte20 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.KeepAliveCounter = v_uint32()
self.DiskCounters = v_ptr32()
self.LastFreezeInterruptTime = v_uint64()
class ALPC_PORT_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.SecurityQos = SECURITY_QUALITY_OF_SERVICE()
self.MaxMessageLength = v_uint32()
self.MemoryBandwidth = v_uint32()
self.MaxPoolUsage = v_uint32()
self.MaxSectionSize = v_uint32()
self.MaxViewSize = v_uint32()
self.MaxTotalSectionSize = v_uint32()
self.DupObjectTypes = v_uint32()
class KSCHEDULING_GROUP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_uint16()
self.Type = v_uint8()
self.HardCap = v_uint8()
self.RelativeWeight = v_uint32()
self.QueryHistoryTimeStamp = v_uint64()
self.NotificationCycles = v_uint64()
self.SchedulingGroupList = LIST_ENTRY()
self.NotificationDpc = v_ptr32()
self._pad0040 = v_bytes(size=28)
self.PerProcessor = vstruct.VArray([ KSCB() for i in xrange(1) ])
self._pad0140 = v_bytes(size=48)
class _unnamed_28009(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Vpb = v_ptr32()
self.DeviceObject = v_ptr32()
class POWER_SEQUENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SequenceD1 = v_uint32()
self.SequenceD2 = v_uint32()
self.SequenceD3 = v_uint32()
class EVENT_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EventHeader = EVENT_HEADER()
self.BufferContext = ETW_BUFFER_CONTEXT()
self.ExtendedDataCount = v_uint16()
self.UserDataLength = v_uint16()
self.ExtendedData = v_ptr32()
self.UserData = v_ptr32()
self.UserContext = v_ptr32()
self._pad0068 = v_bytes(size=4)
class IO_DRIVER_CREATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self._pad0004 = v_bytes(size=2)
self.ExtraCreateParameter = v_ptr32()
self.DeviceObjectHint = v_ptr32()
self.TxnParameters = v_ptr32()
class _unnamed_29247(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NewCell = _unnamed_34026()
class KTIMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.DueTime = ULARGE_INTEGER()
self.TimerListEntry = LIST_ENTRY()
self.Dpc = v_ptr32()
self.Period = v_uint32()
class _unnamed_34689(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSize = v_uint32()
self.Reserved1 = v_uint32()
self.Reserved2 = v_uint32()
class HIVE_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileName = v_ptr32()
self.BaseName = v_ptr32()
self.RegRootName = v_ptr32()
self.CmHive = v_ptr32()
self.HHiveFlags = v_uint32()
self.CmHiveFlags = v_uint32()
self.CmKcbCacheSize = v_uint32()
self.CmHive2 = v_ptr32()
self.HiveMounted = v_uint8()
self.ThreadFinished = v_uint8()
self.ThreadStarted = v_uint8()
self.Allocate = v_uint8()
self.WinPERequired = v_uint8()
self._pad0028 = v_bytes(size=3)
self.StartEvent = KEVENT()
self.FinishedEvent = KEVENT()
self.MountLock = KEVENT()
class WHEA_ERROR_STATUS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ErrorStatus = v_uint64()
class CM_PARTIAL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.ShareDisposition = v_uint8()
self.Flags = v_uint16()
self.u = _unnamed_33994()
class RTLP_RANGE_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
self.Allocated = _unnamed_35924()
self.Attributes = v_uint8()
self.PublicFlags = v_uint8()
self.PrivateFlags = v_uint16()
self.ListEntry = LIST_ENTRY()
self._pad0028 = v_bytes(size=4)
class EVENT_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Id = v_uint16()
self.Version = v_uint8()
self.Channel = v_uint8()
self.Level = v_uint8()
self.Opcode = v_uint8()
self.Task = v_uint16()
self.Keyword = v_uint64()
class WHEAP_ERROR_SOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.FailedAllocations = v_uint32()
self.PlatformErrorSourceId = v_uint32()
self.ErrorCount = v_uint32()
self.RecordCount = v_uint32()
self.RecordLength = v_uint32()
self.PoolTag = v_uint32()
self.Type = v_uint32()
self.Records = v_ptr32()
self.Context = v_ptr32()
self.SectionCount = v_uint32()
self.SectionLength = v_uint32()
self._pad0038 = v_bytes(size=4)
self.TickCountAtLastError = LARGE_INTEGER()
self.AccumulatedErrors = v_uint32()
self.TotalErrors = v_uint32()
self.Deferred = v_uint8()
self.Descriptor = WHEA_ERROR_SOURCE_DESCRIPTOR()
self._pad0418 = v_bytes(size=3)
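# OBJECT_ATTRIBUTES: descriptor passed to Nt/Zw object APIs (object name, root directory
# handle, attribute flags, security descriptor).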
class OBJECT_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.RootDirectory = v_ptr32()
self.ObjectName = v_ptr32()
self.Attributes = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.SecurityQualityOfService = v_ptr32()
class OBJECT_HEADER_AUDIT_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityDescriptor = v_ptr32()
self.Reserved = v_uint32()
class EVENT_HEADER_EXTENDED_DATA_ITEM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved1 = v_uint16()
self.ExtType = v_uint16()
self.Linkage = v_uint16()
self.DataSize = v_uint16()
self.DataPtr = v_uint64()
class _unnamed_29028(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NumberOfSystemCacheViews = v_uint32()
self.WritableUserReferences = v_uint32()
self.SubsectionRoot = v_ptr32()
class CM_FULL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.PartialResourceList = CM_PARTIAL_RESOURCE_LIST()
class _unnamed_27833(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.FileAttributes = v_uint16()
self.ShareAccess = v_uint16()
self.EaLength = v_uint32()
class DBGKD_CONTEXT_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.ByteCount = v_uint32()
self.BytesCopied = v_uint32()
class DBGKD_GET_VERSION64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorVersion = v_uint16()
self.MinorVersion = v_uint16()
self.ProtocolVersion = v_uint8()
self.KdSecondaryVersion = v_uint8()
self.Flags = v_uint16()
self.MachineType = v_uint16()
self.MaxPacketType = v_uint8()
self.MaxStateChange = v_uint8()
self.MaxManipulate = v_uint8()
self.Simulation = v_uint8()
self.Unused = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self.KernBase = v_uint64()
self.PsLoadedModuleList = v_uint64()
self.DebuggerDataList = v_uint64()
class POP_RW_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.Thread = v_ptr32()
class KTIMER_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TimerExpiry = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
self.TimerEntries = vstruct.VArray([ KTIMER_TABLE_ENTRY() for i in xrange(256) ])
class PROC_IDLE_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PromotePercent = v_uint8()
self.DemotePercent = v_uint8()
self.PromotePercentBase = v_uint8()
self.DemotePercentBase = v_uint8()
self.AllowScaling = v_uint8()
class _unnamed_30828(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30885()
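# FAST_IO_DISPATCH: optional table of fast-path I/O entry points a driver may expose
# alongside its IRP-based MajorFunction handlers.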
class FAST_IO_DISPATCH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFastIoDispatch = v_uint32()
self.FastIoCheckIfPossible = v_ptr32()
self.FastIoRead = v_ptr32()
self.FastIoWrite = v_ptr32()
self.FastIoQueryBasicInfo = v_ptr32()
self.FastIoQueryStandardInfo = v_ptr32()
self.FastIoLock = v_ptr32()
self.FastIoUnlockSingle = v_ptr32()
self.FastIoUnlockAll = v_ptr32()
self.FastIoUnlockAllByKey = v_ptr32()
self.FastIoDeviceControl = v_ptr32()
self.AcquireFileForNtCreateSection = v_ptr32()
self.ReleaseFileForNtCreateSection = v_ptr32()
self.FastIoDetachDevice = v_ptr32()
self.FastIoQueryNetworkOpenInfo = v_ptr32()
self.AcquireForModWrite = v_ptr32()
self.MdlRead = v_ptr32()
self.MdlReadComplete = v_ptr32()
self.PrepareMdlWrite = v_ptr32()
self.MdlWriteComplete = v_ptr32()
self.FastIoReadCompressed = v_ptr32()
self.FastIoWriteCompressed = v_ptr32()
self.MdlReadCompleteCompressed = v_ptr32()
self.MdlWriteCompleteCompressed = v_ptr32()
self.FastIoQueryOpen = v_ptr32()
self.ReleaseForModWrite = v_ptr32()
self.AcquireForCcFlush = v_ptr32()
self.ReleaseForCcFlush = v_ptr32()
class _unnamed_28142(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerState = v_uint32()
class SLIST_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Alignment = v_uint64()
class CM_KEY_CONTROL_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RefCount = v_uint32()
self.ExtFlags = v_uint32()
self.DelayedDeref = v_uint32()
self.KeyHash = CM_KEY_HASH()
self.KcbPushlock = EX_PUSH_LOCK()
self.Owner = v_ptr32()
self.SlotHint = v_uint32()
self.ParentKcb = v_ptr32()
self.NameBlock = v_ptr32()
self.CachedSecurity = v_ptr32()
self.ValueCache = CACHED_CHILD_LIST()
self.IndexHint = v_ptr32()
self.KeyBodyListHead = LIST_ENTRY()
self.KeyBodyArray = vstruct.VArray([ v_ptr32() for i in xrange(4) ])
self.KcbLastWriteTime = LARGE_INTEGER()
self.KcbMaxNameLen = v_uint16()
self.KcbMaxValueNameLen = v_uint16()
self.KcbMaxValueDataLen = v_uint32()
self.KcbUserFlags = v_uint32()
self.KCBUoWListHead = LIST_ENTRY()
self.DelayQueueEntry = LIST_ENTRY()
self.TransKCBOwner = v_ptr32()
self.KCBLock = CM_INTENT_LOCK()
self.KeyLock = CM_INTENT_LOCK()
self.TransValueCache = CHILD_LIST()
self.TransValueListOwner = v_ptr32()
self.FullKCBName = v_ptr32()
class MMPFNLIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Total = v_uint32()
self.ListName = v_uint32()
self.Flink = v_uint32()
self.Blink = v_uint32()
self.Lock = v_uint32()
class NB10(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Offset = v_uint32()
self.TimeStamp = v_uint32()
self.Age = v_uint32()
self.PdbName = vstruct.VArray([ v_uint8() for i in xrange(1) ])
self._pad0014 = v_bytes(size=3)
class RTL_DYNAMIC_HASH_TABLE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ChainHead = v_ptr32()
self.PrevLinkage = v_ptr32()
self.Signature = v_uint32()
class MMWSL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FirstFree = v_uint32()
self.FirstDynamic = v_uint32()
self.LastEntry = v_uint32()
self.NextSlot = v_uint32()
self.LastInitializedWsle = v_uint32()
self.NextAgingSlot = v_uint32()
self.NextAccessClearingSlot = v_uint32()
self.LastAccessClearingRemainder = v_uint32()
self.LastAgingRemainder = v_uint32()
self.WsleSize = v_uint32()
self.NonDirectCount = v_uint32()
self.LowestPagableAddress = v_ptr32()
self.NonDirectHash = v_ptr32()
self.HashTableStart = v_ptr32()
self.HighestPermittedHashAddress = v_ptr32()
self.ActiveWsleCounts = vstruct.VArray([ v_uint32() for i in xrange(8) ])
self.ActiveWsles = vstruct.VArray([ MI_ACTIVE_WSLE() for i in xrange(8) ])
self.Wsle = v_ptr32()
self.UserVaInfo = MI_USER_VA_INFO()
class KTMOBJECT_NAMESPACE_LINK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Links = RTL_BALANCED_LINKS()
self.Expired = v_uint8()
self._pad0014 = v_bytes(size=3)
class MI_IMAGE_SECURITY_REFERENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = IMAGE_SECURITY_CONTEXT()
self.DynamicRelocations = v_ptr32()
class WHEA_MEMORY_ERROR_SECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ValidBits = WHEA_MEMORY_ERROR_SECTION_VALIDBITS()
self.ErrorStatus = WHEA_ERROR_STATUS()
self.PhysicalAddress = v_uint64()
self.PhysicalAddressMask = v_uint64()
self.Node = v_uint16()
self.Card = v_uint16()
self.Module = v_uint16()
self.Bank = v_uint16()
self.Device = v_uint16()
self.Row = v_uint16()
self.Column = v_uint16()
self.BitPosition = v_uint16()
self.RequesterId = v_uint64()
self.ResponderId = v_uint64()
self.TargetId = v_uint64()
self.ErrorType = v_uint8()
class DBGKD_CONTINUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContinueStatus = v_uint32()
class PROC_IDLE_STATE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalTime = v_uint64()
self.CancelCount = v_uint32()
self.FailureCount = v_uint32()
self.SuccessCount = v_uint32()
self.InvalidBucketIndex = v_uint32()
self.MinTime = v_uint64()
self.MaxTime = v_uint64()
self.IdleTimeBuckets = vstruct.VArray([ PROC_IDLE_STATE_BUCKET() for i in xrange(26) ])
class CALL_HASH_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.CallersAddress = v_ptr32()
self.CallersCaller = v_ptr32()
self.CallCount = v_uint32()
class MMSESSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemSpaceViewLock = FAST_MUTEX()
self.SystemSpaceViewLockPointer = v_ptr32()
self.SystemSpaceViewTable = v_ptr32()
self.SystemSpaceHashSize = v_uint32()
self.SystemSpaceHashEntries = v_uint32()
self.SystemSpaceHashKey = v_uint32()
self.BitmapFailures = v_uint32()
class WORK_QUEUE_ITEM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.List = LIST_ENTRY()
self.WorkerRoutine = v_ptr32()
self.Parameter = v_ptr32()
class _unnamed_36838(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceNumber = v_uint32()
class _unnamed_32530(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseMid = v_uint8()
self.Flags1 = v_uint8()
self.Flags2 = v_uint8()
self.BaseHi = v_uint8()
class KSPECIAL_REGISTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cr0 = v_uint32()
self.Cr2 = v_uint32()
self.Cr3 = v_uint32()
self.Cr4 = v_uint32()
self.KernelDr0 = v_uint32()
self.KernelDr1 = v_uint32()
self.KernelDr2 = v_uint32()
self.KernelDr3 = v_uint32()
self.KernelDr6 = v_uint32()
self.KernelDr7 = v_uint32()
self.Gdtr = DESCRIPTOR()
self.Idtr = DESCRIPTOR()
self.Tr = v_uint16()
self.Ldtr = v_uint16()
self.Xcr0 = v_uint64()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(4) ])
class POWER_ACTION_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Action = v_uint32()
self.Flags = v_uint32()
self.EventCode = v_uint32()
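# IMAGE_SECTION_HEADER: PE/COFF section table entry (name, virtual address, raw data size
# and file pointers, characteristics).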
class IMAGE_SECTION_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Name = vstruct.VArray([ v_uint8() for i in xrange(8) ])
self.Misc = _unnamed_34731()
self.VirtualAddress = v_uint32()
self.SizeOfRawData = v_uint32()
self.PointerToRawData = v_uint32()
self.PointerToRelocations = v_uint32()
self.PointerToLinenumbers = v_uint32()
self.NumberOfRelocations = v_uint16()
self.NumberOfLinenumbers = v_uint16()
self.Characteristics = v_uint32()
class _unnamed_32122(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FilePointerIndex = v_uint32()
class _unnamed_32123(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FilePointerIndex = v_uint32()
class DBGKM_EXCEPTION64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionRecord = EXCEPTION_RECORD64()
self.FirstChance = v_uint32()
self._pad00a0 = v_bytes(size=4)
class _unnamed_29796(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextResourceDeviceNode = v_ptr32()
class _unnamed_29795(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LegacyDeviceNode = v_ptr32()
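# XSAVE_FORMAT: legacy 512-byte FXSAVE area (x87/MMX/SSE state) that begins an XSAVE buffer.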
class XSAVE_FORMAT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlWord = v_uint16()
self.StatusWord = v_uint16()
self.TagWord = v_uint8()
self.Reserved1 = v_uint8()
self.ErrorOpcode = v_uint16()
self.ErrorOffset = v_uint32()
self.ErrorSelector = v_uint16()
self.Reserved2 = v_uint16()
self.DataOffset = v_uint32()
self.DataSelector = v_uint16()
self.Reserved3 = v_uint16()
self.MxCsr = v_uint32()
self.MxCsr_Mask = v_uint32()
self.FloatRegisters = vstruct.VArray([ M128A() for i in xrange(8) ])
self.XmmRegisters = vstruct.VArray([ M128A() for i in xrange(8) ])
self.Reserved4 = vstruct.VArray([ v_uint8() for i in xrange(220) ])
self.Cr0NpxState = v_uint32()
class KSYSTEM_TIME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.High1Time = v_uint32()
self.High2Time = v_uint32()
class SEGMENT_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalNumberOfPtes4132 = v_uint16()
self.FloppyMedia = v_uint8()
self.ILOnly = v_uint8()
class PO_DEVICE_NOTIFY_ORDER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locked = v_uint8()
self._pad0004 = v_bytes(size=3)
self.WarmEjectPdoPointer = v_ptr32()
self.OrderLevel = vstruct.VArray([ PO_NOTIFY_ORDER_LEVEL() for i in xrange(5) ])
class PROCESSOR_IDLE_CONSTRAINTS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalTime = v_uint64()
self.IdleTime = v_uint64()
self.ExpectedIdleDuration = v_uint64()
self.MaxIdleDuration = v_uint32()
self.OverrideState = v_uint32()
self.TimeCheck = v_uint32()
self.PromotePercent = v_uint8()
self.DemotePercent = v_uint8()
self.Parked = v_uint8()
self.Interruptible = v_uint8()
self.PlatformIdle = v_uint8()
self._pad0030 = v_bytes(size=7)
class FLOATING_SAVE_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlWord = v_uint32()
self.StatusWord = v_uint32()
self.TagWord = v_uint32()
self.ErrorOffset = v_uint32()
self.ErrorSelector = v_uint32()
self.DataOffset = v_uint32()
self.DataSelector = v_uint32()
self.RegisterArea = vstruct.VArray([ v_uint8() for i in xrange(80) ])
self.Cr0NpxState = v_uint32()
class POP_FX_PROVIDER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Index = v_uint32()
self.Activating = v_uint8()
self._pad0008 = v_bytes(size=3)
class KQUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.EntryListHead = LIST_ENTRY()
self.CurrentCount = v_uint32()
self.MaximumCount = v_uint32()
self.ThreadListHead = LIST_ENTRY()
class DEVICE_DESCRIPTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.Master = v_uint8()
self.ScatterGather = v_uint8()
self.DemandMode = v_uint8()
self.AutoInitialize = v_uint8()
self.Dma32BitAddresses = v_uint8()
self.IgnoreCount = v_uint8()
self.Reserved1 = v_uint8()
self.Dma64BitAddresses = v_uint8()
self.BusNumber = v_uint32()
self.DmaChannel = v_uint32()
self.InterfaceType = v_uint32()
self.DmaWidth = v_uint32()
self.DmaSpeed = v_uint32()
self.MaximumLength = v_uint32()
self.DmaPort = v_uint32()
self.DmaAddressWidth = v_uint32()
self.DmaControllerInstance = v_uint32()
self.DmaRequestLine = v_uint32()
self._pad0038 = v_bytes(size=4)
self.DeviceAddress = LARGE_INTEGER()
class _unnamed_29151(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Space = v_uint32()
self.MapPoint = v_uint32()
self.BinPoint = v_ptr32()
class _unnamed_29150(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.List = v_ptr32()
self.Index = v_uint32()
self.Cell = v_uint32()
self.CellPoint = v_ptr32()
class _unnamed_29153(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileOffset = v_uint32()
class _unnamed_29152(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bin = v_ptr32()
self.CellPoint = v_ptr32()
class SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlArea = v_ptr32()
self.TotalNumberOfPtes = v_uint32()
self.SegmentFlags = SEGMENT_FLAGS()
self.NumberOfCommittedPages = v_uint32()
self.SizeOfSegment = v_uint64()
self.ExtendInfo = v_ptr32()
self.SegmentLock = EX_PUSH_LOCK()
self.u1 = _unnamed_28990()
self.u2 = _unnamed_28991()
self.PrototypePte = v_ptr32()
self._pad0030 = v_bytes(size=4)
class _unnamed_37222(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.CheckSum = v_uint32()
class LUID_AND_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Luid = LUID()
self.Attributes = v_uint32()
class _unnamed_37225(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DiskId = GUID()
class iobuf(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ptr = v_ptr32()
self.cnt = v_uint32()
self.base = v_ptr32()
self.flag = v_uint32()
self.file = v_uint32()
self.charbuf = v_uint32()
self.bufsiz = v_uint32()
self.tmpfname = v_ptr32()
class PCW_CALLBACK_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AddCounter = PCW_COUNTER_INFORMATION()
self._pad0020 = v_bytes(size=16)
class PCW_COUNTER_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Id = v_uint16()
self.StructIndex = v_uint16()
self.Offset = v_uint16()
self.Size = v_uint16()
class MMMOD_WRITER_MDL_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Links = LIST_ENTRY()
self.u = _unnamed_29100()
self.Irp = v_ptr32()
self.u1 = MODWRITER_FLAGS()
self.ByteCount = v_uint32()
self.PagingFile = v_ptr32()
self.File = v_ptr32()
self.ControlArea = v_ptr32()
self.FileResource = v_ptr32()
self._pad0030 = v_bytes(size=4)
self.WriteOffset = LARGE_INTEGER()
self.IssueTime = LARGE_INTEGER()
self.PointerMdl = v_ptr32()
self.Mdl = MDL()
self.Page = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self._pad0068 = v_bytes(size=4)
class CACHED_CHILD_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.ValueList = v_uint32()
class PCW_MASK_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CounterMask = v_uint64()
self.InstanceMask = v_ptr32()
self.InstanceId = v_uint32()
self.CollectMultiple = v_uint8()
self._pad0014 = v_bytes(size=3)
self.Buffer = v_ptr32()
self.CancelEvent = v_ptr32()
self._pad0020 = v_bytes(size=4)
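# KTHREAD: the kernel scheduler's view of a thread (stacks, APC state, wait blocks,
# affinity); embedded as the Tcb field at the start of ETHREAD.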
class KTHREAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.SListFaultAddress = v_ptr32()
self._pad0018 = v_bytes(size=4)
self.QuantumTarget = v_uint64()
self.InitialStack = v_ptr32()
self.StackLimit = v_ptr32()
self.StackBase = v_ptr32()
self.ThreadLock = v_uint32()
self.CycleTime = v_uint64()
self.HighCycleTime = v_uint32()
self.ServiceTable = v_ptr32()
self.CurrentRunTime = v_uint32()
self.ExpectedRunTime = v_uint32()
self.KernelStack = v_ptr32()
self.StateSaveArea = v_ptr32()
self.SchedulingGroup = v_ptr32()
self.WaitRegister = KWAIT_STATUS_REGISTER()
self.Running = v_uint8()
self.Alerted = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.KernelStackResident = v_uint32()
self.AutoAlignment = v_uint32()
self.Spare0 = v_uint32()
self.SystemCallNumber = v_uint32()
self.FirstArgument = v_ptr32()
self.TrapFrame = v_ptr32()
self.ApcState = KAPC_STATE()
self.UserIdealProcessor = v_uint32()
self.ContextSwitches = v_uint32()
self.State = v_uint8()
self.NpxState = v_uint8()
self.WaitIrql = v_uint8()
self.WaitMode = v_uint8()
self.WaitStatus = v_uint32()
self.WaitBlockList = v_ptr32()
self.WaitListEntry = LIST_ENTRY()
self.Queue = v_ptr32()
self.Teb = v_ptr32()
self._pad00b0 = v_bytes(size=4)
self.RelativeTimerBias = v_uint64()
self.Timer = KTIMER()
self.WaitBlock = vstruct.VArray([ KWAIT_BLOCK() for i in xrange(4) ])
self.QueueListEntry = LIST_ENTRY()
self.NextProcessor = v_uint32()
self.DeferredProcessor = v_uint32()
self.Process = v_ptr32()
self.UserAffinity = GROUP_AFFINITY()
self.Affinity = GROUP_AFFINITY()
self.ApcStatePointer = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.SavedApcState = KAPC_STATE()
self.SuspendCount = v_uint8()
self.Saturation = v_uint8()
self.SListFaultCount = v_uint16()
self.SchedulerApc = KAPC()
self.UserTime = v_uint32()
self.SuspendEvent = KEVENT()
self.ThreadListEntry = LIST_ENTRY()
self.MutantListHead = LIST_ENTRY()
self._pad01e8 = v_bytes(size=4)
class _unnamed_34124(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length40 = v_uint32()
self.Alignment40 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class _unnamed_34120(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Priority = v_uint32()
self.Reserved1 = v_uint32()
self.Reserved2 = v_uint32()
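# ALPC_PORT: kernel object backing an Advanced Local Procedure Call port, including its
# message queues, completion state and attached views.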
class ALPC_PORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PortListEntry = LIST_ENTRY()
self.CommunicationInfo = v_ptr32()
self.OwnerProcess = v_ptr32()
self.CompletionPort = v_ptr32()
self.CompletionKey = v_ptr32()
self.CompletionPacketLookaside = v_ptr32()
self.PortContext = v_ptr32()
self.StaticSecurity = SECURITY_CLIENT_CONTEXT()
self.IncomingQueueLock = EX_PUSH_LOCK()
self.MainQueue = LIST_ENTRY()
self.LargeMessageQueue = LIST_ENTRY()
self.PendingQueueLock = EX_PUSH_LOCK()
self.PendingQueue = LIST_ENTRY()
self.WaitQueueLock = EX_PUSH_LOCK()
self.WaitQueue = LIST_ENTRY()
self.Semaphore = v_ptr32()
self.PortAttributes = ALPC_PORT_ATTRIBUTES()
self.ResourceListLock = EX_PUSH_LOCK()
self.ResourceListHead = LIST_ENTRY()
self.PortObjectLock = EX_PUSH_LOCK()
self.CompletionList = v_ptr32()
self.MessageZone = v_ptr32()
self.CallbackObject = v_ptr32()
self.CallbackContext = v_ptr32()
self.CanceledQueue = LIST_ENTRY()
self.SequenceNo = v_uint32()
self.u1 = _unnamed_30882()
self.TargetQueuePort = v_ptr32()
self.TargetSequencePort = v_ptr32()
self.CachedMessage = v_ptr32()
self.MainQueueLength = v_uint32()
self.LargeMessageQueueLength = v_uint32()
self.PendingQueueLength = v_uint32()
self.CanceledQueueLength = v_uint32()
self.WaitQueueLength = v_uint32()
class WHEAP_ERROR_RECORD_WRAPPER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkEntry = LIST_ENTRY()
self.Length = v_uint32()
self.ProcessorNumber = v_uint32()
self.Flags = WHEAP_ERROR_RECORD_WRAPPER_FLAGS()
self.InUse = v_uint32()
self.ErrorSource = v_ptr32()
self.ErrorRecord = WHEA_ERROR_RECORD()
class ADAPTER_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_34658(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length = v_uint32()
class _unnamed_34129(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length48 = v_uint32()
self.Alignment48 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class _unnamed_28175(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemContext = v_uint32()
self.Type = v_uint32()
self.State = POWER_STATE()
self.ShutdownType = v_uint32()
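# CONTEXT: x86 thread register context (debug registers, x87 save area, segment and
# general-purpose registers, plus 512 bytes of extended/SSE state).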
class CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFlags = v_uint32()
self.Dr0 = v_uint32()
self.Dr1 = v_uint32()
self.Dr2 = v_uint32()
self.Dr3 = v_uint32()
self.Dr6 = v_uint32()
self.Dr7 = v_uint32()
self.FloatSave = FLOATING_SAVE_AREA()
self.SegGs = v_uint32()
self.SegFs = v_uint32()
self.SegEs = v_uint32()
self.SegDs = v_uint32()
self.Edi = v_uint32()
self.Esi = v_uint32()
self.Ebx = v_uint32()
self.Edx = v_uint32()
self.Ecx = v_uint32()
self.Eax = v_uint32()
self.Ebp = v_uint32()
self.Eip = v_uint32()
self.SegCs = v_uint32()
self.EFlags = v_uint32()
self.Esp = v_uint32()
self.SegSs = v_uint32()
self.ExtendedRegisters = vstruct.VArray([ v_uint8() for i in xrange(512) ])
class _unnamed_34088(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Alignment = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class VF_TARGET_DRIVER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TreeNode = VF_AVL_TREE_NODE()
self.u1 = _unnamed_34312()
self.VerifiedData = v_ptr32()
class DBGKD_GET_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Unused = v_uint32()
class VACB_LEVEL_ALLOCATION_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VacbLevelList = LIST_ENTRY()
self.VacbLevelWithBcbListHeads = v_ptr32()
self.VacbLevelsAllocated = v_uint32()
class KTRANSACTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OutcomeEvent = KEVENT()
self.cookie = v_uint32()
self.Mutex = KMUTANT()
self.TreeTx = v_ptr32()
self.GlobalNamespaceLink = KTMOBJECT_NAMESPACE_LINK()
self.TmNamespaceLink = KTMOBJECT_NAMESPACE_LINK()
self.UOW = GUID()
self.State = v_uint32()
self.Flags = v_uint32()
self.EnlistmentHead = LIST_ENTRY()
self.EnlistmentCount = v_uint32()
self.RecoverableEnlistmentCount = v_uint32()
self.PrePrepareRequiredEnlistmentCount = v_uint32()
self.PrepareRequiredEnlistmentCount = v_uint32()
self.OutcomeRequiredEnlistmentCount = v_uint32()
self.PendingResponses = v_uint32()
self.SuperiorEnlistment = v_ptr32()
self._pad00a0 = v_bytes(size=4)
self.LastLsn = CLS_LSN()
self.PromotedEntry = LIST_ENTRY()
self.PromoterTransaction = v_ptr32()
self.PromotePropagation = v_ptr32()
self.IsolationLevel = v_uint32()
self.IsolationFlags = v_uint32()
self.Timeout = LARGE_INTEGER()
self.Description = UNICODE_STRING()
self.RollbackThread = v_ptr32()
self.RollbackWorkItem = WORK_QUEUE_ITEM()
self.RollbackDpc = KDPC()
self._pad0108 = v_bytes(size=4)
self.RollbackTimer = KTIMER()
self.LsnOrderedEntry = LIST_ENTRY()
self.Outcome = v_uint32()
self.Tm = v_ptr32()
self.CommitReservation = v_uint64()
self.TransactionHistory = vstruct.VArray([ KTRANSACTION_HISTORY() for i in xrange(10) ])
self.TransactionHistoryCount = v_uint32()
self.DTCPrivateInformation = v_ptr32()
self.DTCPrivateInformationLength = v_uint32()
self.DTCPrivateInformationMutex = KMUTANT()
self.PromotedTxSelfHandle = v_ptr32()
self.PendingPromotionCount = v_uint32()
self.PromotionCompletedEvent = KEVENT()
self._pad01e0 = v_bytes(size=4)
class GENERIC_MAPPING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GenericRead = v_uint32()
self.GenericWrite = v_uint32()
self.GenericExecute = v_uint32()
self.GenericAll = v_uint32()
class _unnamed_31111(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.QueueType = v_uint32()
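# DEVICE_NODE: Plug and Play device tree node tracking a devnode's state, resources and
# parent/child/sibling relationships.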
class DEVICE_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Sibling = v_ptr32()
self.Child = v_ptr32()
self.Parent = v_ptr32()
self.LastChild = v_ptr32()
self.PhysicalDeviceObject = v_ptr32()
self.InstancePath = UNICODE_STRING()
self.ServiceName = UNICODE_STRING()
self.PendingIrp = v_ptr32()
self.Level = v_uint32()
self.CurrentPowerState = POWER_STATE()
self.Notify = PO_DEVICE_NOTIFY()
self.PoIrpManager = PO_IRP_MANAGER()
self.FxDevice = v_ptr32()
self.FxDeviceLock = v_uint32()
self.FxRemoveEvent = KEVENT()
self.FxActivationCount = v_uint32()
self.FxSleepCount = v_uint32()
self.Plugin = v_ptr32()
self.UniqueId = UNICODE_STRING()
self.PowerFlags = v_uint32()
self.State = v_uint32()
self.PreviousState = v_uint32()
self.StateHistory = vstruct.VArray([ PNP_DEVNODE_STATE() for i in xrange(20) ])
self.StateHistoryEntry = v_uint32()
self.CompletionStatus = v_uint32()
self.Flags = v_uint32()
self.UserFlags = v_uint32()
self.Problem = v_uint32()
self.ProblemStatus = v_uint32()
self.ResourceList = v_ptr32()
self.ResourceListTranslated = v_ptr32()
self.DuplicatePDO = v_ptr32()
self.ResourceRequirements = v_ptr32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.ChildInterfaceType = v_uint32()
self.ChildBusNumber = v_uint32()
self.ChildBusTypeIndex = v_uint16()
self.RemovalPolicy = v_uint8()
self.HardwareRemovalPolicy = v_uint8()
self.TargetDeviceNotify = LIST_ENTRY()
self.DeviceArbiterList = LIST_ENTRY()
self.DeviceTranslatorList = LIST_ENTRY()
self.NoTranslatorMask = v_uint16()
self.QueryTranslatorMask = v_uint16()
self.NoArbiterMask = v_uint16()
self.QueryArbiterMask = v_uint16()
self.OverUsed1 = _unnamed_29795()
self.OverUsed2 = _unnamed_29796()
self.BootResources = v_ptr32()
self.BootResourcesTranslated = v_ptr32()
self.CapabilityFlags = v_uint32()
self.DockInfo = _unnamed_29797()
self.DisableableDepends = v_uint32()
self.PendedSetInterfaceState = LIST_ENTRY()
self.LegacyBusListEntry = LIST_ENTRY()
self.DriverUnloadRetryCount = v_uint32()
self.PreviousParent = v_ptr32()
self.DeletedChildren = v_uint32()
self.NumaNodeIndex = v_uint32()
self.ContainerID = GUID()
self.OverrideFlags = v_uint8()
self._pad01bc = v_bytes(size=3)
self.DeviceIdsHash = v_uint32()
self.RequiresUnloadedDriver = v_uint8()
self._pad01c4 = v_bytes(size=3)
self.PendingEjectRelations = v_ptr32()
self.StateFlags = v_uint32()
class KALPC_MESSAGE_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClientContext = v_ptr32()
self.ServerContext = v_ptr32()
self.PortContext = v_ptr32()
self.CancelPortContext = v_ptr32()
self.SecurityData = v_ptr32()
self.View = v_ptr32()
self.HandleData = v_ptr32()
class PPC_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Continue = v_uint32()
self.CurrentSymbolStart = v_uint32()
self.CurrentSymbolEnd = v_uint32()
class _unnamed_31033(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_31111()
class PROC_PERF_LOAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BusyPercentage = v_uint8()
self.FrequencyPercentage = v_uint8()
class AUX_ACCESS_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegesUsed = v_ptr32()
self.GenericMapping = GENERIC_MAPPING()
self.AccessesToAudit = v_uint32()
self.MaximumAuditMask = v_uint32()
self.TransactionId = GUID()
self.NewSecurityDescriptor = v_ptr32()
self.ExistingSecurityDescriptor = v_ptr32()
self.ParentSecurityDescriptor = v_ptr32()
self.DeRefSecurityDescriptor = v_ptr32()
self.SDLock = v_ptr32()
self.AccessReasons = ACCESS_REASONS()
self.GenerateStagingEvents = v_uint8()
self._pad00c4 = v_bytes(size=3)
class SE_AUDIT_PROCESS_CREATION_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageFileName = v_ptr32()
class IO_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint16()
self.Revision = v_uint16()
self.Count = v_uint32()
self.Descriptors = vstruct.VArray([ IO_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class STACK_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NumStackTraces = v_uint16()
self.TraceCapacity = v_uint16()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
self.StackTableHash = vstruct.VArray([ v_uint16() for i in xrange(16381) ])
self._pad8040 = v_bytes(size=2)
class _unnamed_27934(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileName = v_ptr32()
self.FileInformationClass = v_uint32()
self.FileIndex = v_uint32()
class _unnamed_37086(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImagePteOffset = v_uint32()
class _unnamed_37087(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.e1 = MMINPAGE_FLAGS()
class OBJECT_HEADER_HANDLE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HandleCountDataBase = v_ptr32()
self._pad0008 = v_bytes(size=4)
class ETW_LOGGER_HANDLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DereferenceAndLeave = v_uint8()
class IMAGE_ROM_OPTIONAL_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint16()
self.MajorLinkerVersion = v_uint8()
self.MinorLinkerVersion = v_uint8()
self.SizeOfCode = v_uint32()
self.SizeOfInitializedData = v_uint32()
self.SizeOfUninitializedData = v_uint32()
self.AddressOfEntryPoint = v_uint32()
self.BaseOfCode = v_uint32()
self.BaseOfData = v_uint32()
self.BaseOfBss = v_uint32()
self.GprMask = v_uint32()
self.CprMask = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.GpValue = v_uint32()
class POP_FX_PLUGIN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Version = v_uint32()
self._pad0010 = v_bytes(size=4)
self.Flags = v_uint64()
self.WorkOrder = POP_FX_WORK_ORDER()
self.WorkQueue = KQUEUE()
self.AcceptDeviceNotification = v_ptr32()
self.AcceptProcessorNotification = v_ptr32()
self._pad0060 = v_bytes(size=4)
class _unnamed_27993(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityInformation = v_uint32()
self.Length = v_uint32()
class HEAP_FREE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Flags = v_uint8()
self.SmallTagIndex = v_uint8()
self.PreviousSize = v_uint16()
self.SegmentOffset = v_uint8()
self.UnusedBytes = v_uint8()
self.FreeList = LIST_ENTRY()
class LOGGED_STREAM_CALLBACK_V1(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LogHandle = v_ptr32()
self.FlushToLsnRoutine = v_ptr32()
class MI_PTE_CHAIN_HEAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = MMPTE()
self.Blink = MMPTE()
self.PteBase = v_ptr32()
self._pad0018 = v_bytes(size=4)
class _unnamed_27996(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityInformation = v_uint32()
self.SecurityDescriptor = v_ptr32()
class POOL_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PoolType = v_uint32()
self.PagedLock = FAST_MUTEX()
self._pad0040 = v_bytes(size=28)
self.RunningAllocs = v_uint32()
self.RunningDeAllocs = v_uint32()
self.TotalBigPages = v_uint32()
self.ThreadsProcessingDeferrals = v_uint32()
self.TotalBytes = v_uint32()
self._pad0080 = v_bytes(size=44)
self.PoolIndex = v_uint32()
self._pad00c0 = v_bytes(size=60)
self.TotalPages = v_uint32()
self._pad0100 = v_bytes(size=60)
self.PendingFrees = SINGLE_LIST_ENTRY()
self.PendingFreeDepth = v_uint32()
self._pad0140 = v_bytes(size=56)
self.ListHeads = vstruct.VArray([ LIST_ENTRY() for i in xrange(512) ])
class OBJECT_REF_STACK_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Sequence = v_uint32()
self.Index = v_uint16()
self.NumTraces = v_uint16()
self.Tag = v_uint32()
class PF_KERNEL_GLOBALS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AccessBufferAgeThreshold = v_uint64()
self.AccessBufferRef = EX_RUNDOWN_REF()
self.AccessBufferExistsEvent = KEVENT()
self.AccessBufferMax = v_uint32()
self.AccessBufferList = SLIST_HEADER()
self.StreamSequenceNumber = v_uint32()
self.Flags = v_uint32()
self.ScenarioPrefetchCount = v_uint32()
self._pad0040 = v_bytes(size=12)
class _unnamed_34113(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = vstruct.VArray([ v_uint32() for i in xrange(3) ])
class GDI_TEB_BATCH64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self._pad0008 = v_bytes(size=4)
self.HDC = v_uint64()
self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])
class DBGKD_READ_MEMORY64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TargetBaseAddress = v_uint64()
self.TransferCount = v_uint32()
self.ActualBytesRead = v_uint32()
class MI_SYSTEM_PTE_TYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bitmap = RTL_BITMAP()
self.Flags = v_uint32()
self.Hint = v_uint32()
self.BasePte = v_ptr32()
self.FailureCount = v_ptr32()
self.Vm = v_ptr32()
self.TotalSystemPtes = v_uint32()
self.TotalFreeSystemPtes = v_uint32()
self.CachedPteCount = v_uint32()
self.PteFailures = v_uint32()
self.SpinLock = v_uint32()
self.CachedPtes = v_ptr32()
class MMPTE_HIGHLOW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class PO_MEMORY_IMAGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.ImageType = v_uint32()
self.CheckSum = v_uint32()
self.LengthSelf = v_uint32()
self.PageSelf = v_uint32()
self.PageSize = v_uint32()
self.SystemTime = LARGE_INTEGER()
self.InterruptTime = v_uint64()
self.FeatureFlags = v_uint32()
self.HiberFlags = v_uint8()
self.spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.NoHiberPtes = v_uint32()
self.HiberVa = v_uint32()
self.NoFreePages = v_uint32()
self.FreeMapCheck = v_uint32()
self.WakeCheck = v_uint32()
self._pad0048 = v_bytes(size=4)
self.NumPagesForLoader = v_uint64()
self.FirstBootRestorePage = v_uint32()
self.FirstKernelRestorePage = v_uint32()
self.PerfInfo = PO_HIBER_PERF()
self.FirmwareRuntimeInformationPages = v_uint32()
self.FirmwareRuntimeInformation = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.SiLogOffset = v_uint32()
self.NoBootLoaderLogPages = v_uint32()
self.BootLoaderLogPages = vstruct.VArray([ v_uint32() for i in xrange(24) ])
self.NotUsed = v_uint32()
self.ResumeContextCheck = v_uint32()
self.ResumeContextPages = v_uint32()
self.Hiberboot = v_uint8()
self._pad0280 = v_bytes(size=3)
self.HvCr3 = v_uint64()
self.HvEntryPoint = v_uint64()
self.HvReservedTransitionAddress = v_uint64()
self.HvReservedTransitionAddressSize = v_uint64()
self.BootFlags = v_uint64()
self.HalEntryPointPhysical = v_uint64()
self.HighestPhysicalPage = v_uint32()
self.BitlockerKeyPfns = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.HardwareSignature = v_uint32()
class LOOKASIDE_LIST_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE_POOL()
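# ETHREAD: executive thread object; wraps KTHREAD (Tcb) with I/O, ALPC, security and
# bookkeeping fields used by the executive.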
class ETHREAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Tcb = KTHREAD()
self.CreateTime = LARGE_INTEGER()
self.ExitTime = LARGE_INTEGER()
self.ChargeOnlySession = v_ptr32()
self.PostBlockList = LIST_ENTRY()
self.TerminationPort = v_ptr32()
self.ActiveTimerListLock = v_uint32()
self.ActiveTimerListHead = LIST_ENTRY()
self.Cid = CLIENT_ID()
self.KeyedWaitSemaphore = KSEMAPHORE()
self.ClientSecurity = PS_CLIENT_SECURITY_CONTEXT()
self.IrpList = LIST_ENTRY()
self.TopLevelIrp = v_uint32()
self.DeviceToVerify = v_ptr32()
self.Win32StartAddress = v_ptr32()
self.LegacyPowerObject = v_ptr32()
self.ThreadListEntry = LIST_ENTRY()
self.RundownProtect = EX_RUNDOWN_REF()
self.ThreadLock = EX_PUSH_LOCK()
self.ReadClusterSize = v_uint32()
self.MmLockOrdering = v_uint32()
self.CmLockOrdering = v_uint32()
self.CrossThreadFlags = v_uint32()
self.SameThreadPassiveFlags = v_uint32()
self.SameThreadApcFlags = v_uint32()
self.CacheManagerActive = v_uint8()
self.DisablePageFaultClustering = v_uint8()
self.ActiveFaultCount = v_uint8()
self.LockOrderState = v_uint8()
self.AlpcMessageId = v_uint32()
self.AlpcMessage = v_ptr32()
self.ExitStatus = v_uint32()
self.AlpcWaitListEntry = LIST_ENTRY()
self.CacheManagerCount = v_uint32()
self.IoBoostCount = v_uint32()
self.BoostList = LIST_ENTRY()
self.DeboostList = LIST_ENTRY()
self.BoostListLock = v_uint32()
self.IrpListLock = v_uint32()
self.ReservedForSynchTracking = v_ptr32()
self.CmCallbackListHead = SINGLE_LIST_ENTRY()
self.ActivityId = v_ptr32()
self.WnfContext = v_ptr32()
self.SeLearningModeListHead = SINGLE_LIST_ENTRY()
self.KernelStackReference = v_uint32()
self._pad02c8 = v_bytes(size=4)
class EVENT_DATA_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Ptr = v_uint64()
self.Size = v_uint32()
self.Reserved = v_uint32()
class TOKEN_AUDIT_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PerUserPolicy = vstruct.VArray([ v_uint8() for i in xrange(29) ])
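# HHIVE: low-level registry hive control structure (cell access routines, dirty vector,
# stable/volatile storage maps).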
class HHIVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.GetCellRoutine = v_ptr32()
self.Allocate = v_ptr32()
self.Free = v_ptr32()
self.FileWrite = v_ptr32()
self.FileRead = v_ptr32()
self.HiveLoadFailure = v_ptr32()
self.BaseBlock = v_ptr32()
self.DirtyVector = RTL_BITMAP()
self.DirtyCount = v_uint32()
self.DirtyAlloc = v_uint32()
self.BaseBlockAlloc = v_uint32()
self.Cluster = v_uint32()
self.Flat = v_uint8()
self.ReadOnly = v_uint8()
self.DirtyFlag = v_uint8()
self._pad003c = v_bytes(size=1)
self.HvBinHeadersUse = v_uint32()
self.HvFreeCellsUse = v_uint32()
self.HvUsedCellsUse = v_uint32()
self.CmUsedCellsUse = v_uint32()
self.HiveFlags = v_uint32()
self.CurrentLog = v_uint32()
self.LogSize = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.RefreshCount = v_uint32()
self.StorageTypeCount = v_uint32()
self.Version = v_uint32()
self.Storage = vstruct.VArray([ DUAL() for i in xrange(2) ])
class VF_AVL_TREE_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.p = v_ptr32()
self.RangeSize = v_uint32()
class TEB_ACTIVE_FRAME_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.FrameName = v_ptr32()
class CLIENT_ID64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UniqueProcess = v_uint64()
self.UniqueThread = v_uint64()
class FS_FILTER_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AcquireForModifiedPageWriter = _unnamed_35375()
self._pad0014 = v_bytes(size=12)
class OBJECT_SYMBOLIC_LINK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreationTime = LARGE_INTEGER()
self.LinkTarget = UNICODE_STRING()
self.DosDeviceDriveIndex = v_uint32()
self._pad0018 = v_bytes(size=4)
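# HEAP: NT heap manager's main heap header (RtlCreateHeap), covering segment bookkeeping,
# free lists, header encoding and front-end heap state.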
class HEAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.SegmentSignature = v_uint32()
self.SegmentFlags = v_uint32()
self.SegmentListEntry = LIST_ENTRY()
self.Heap = v_ptr32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.SegmentAllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.UCRSegmentList = LIST_ENTRY()
self.Flags = v_uint32()
self.ForceFlags = v_uint32()
self.CompatibilityFlags = v_uint32()
self.EncodeFlagMask = v_uint32()
self.Encoding = HEAP_ENTRY()
self.Interceptor = v_uint32()
self.VirtualMemoryThreshold = v_uint32()
self.Signature = v_uint32()
self.SegmentReserve = v_uint32()
self.SegmentCommit = v_uint32()
self.DeCommitFreeBlockThreshold = v_uint32()
self.DeCommitTotalFreeThreshold = v_uint32()
self.TotalFreeSize = v_uint32()
self.MaximumAllocationSize = v_uint32()
self.ProcessHeapsListIndex = v_uint16()
self.HeaderValidateLength = v_uint16()
self.HeaderValidateCopy = v_ptr32()
self.NextAvailableTagIndex = v_uint16()
self.MaximumTagIndex = v_uint16()
self.TagEntries = v_ptr32()
self.UCRList = LIST_ENTRY()
self.AlignRound = v_uint32()
self.AlignMask = v_uint32()
self.VirtualAllocdBlocks = LIST_ENTRY()
self.SegmentList = LIST_ENTRY()
self.AllocatorBackTraceIndex = v_uint16()
self._pad00b0 = v_bytes(size=2)
self.NonDedicatedListLength = v_uint32()
self.BlocksIndex = v_ptr32()
self.UCRIndex = v_ptr32()
self.PseudoTagEntries = v_ptr32()
self.FreeLists = LIST_ENTRY()
self.LockVariable = v_ptr32()
self.CommitRoutine = v_ptr32()
self.FrontEndHeap = v_ptr32()
self.FrontHeapLockCount = v_uint16()
self.FrontEndHeapType = v_uint8()
self.RequestedFrontEndHeapType = v_uint8()
self.FrontEndHeapUsageData = v_ptr32()
self.FrontEndHeapMaximumIndex = v_uint16()
self.FrontEndHeapStatusBitmap = vstruct.VArray([ v_uint8() for i in xrange(257) ])
self._pad01e0 = v_bytes(size=1)
self.Counters = HEAP_COUNTERS()
self.TuningParameters = HEAP_TUNING_PARAMETERS()
self._pad0248 = v_bytes(size=4)
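# EJOB: executive job object aggregating accounting, limits and notification state for a
# group of processes.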
class EJOB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Event = KEVENT()
self.JobLinks = LIST_ENTRY()
self.ProcessListHead = LIST_ENTRY()
self.JobLock = ERESOURCE()
self.TotalUserTime = LARGE_INTEGER()
self.TotalKernelTime = LARGE_INTEGER()
self.TotalCycleTime = LARGE_INTEGER()
self.ThisPeriodTotalUserTime = LARGE_INTEGER()
self.ThisPeriodTotalKernelTime = LARGE_INTEGER()
self.TotalContextSwitches = v_uint64()
self.TotalPageFaultCount = v_uint32()
self.TotalProcesses = v_uint32()
self.ActiveProcesses = v_uint32()
self.TotalTerminatedProcesses = v_uint32()
self.PerProcessUserTimeLimit = LARGE_INTEGER()
self.PerJobUserTimeLimit = LARGE_INTEGER()
self.MinimumWorkingSetSize = v_uint32()
self.MaximumWorkingSetSize = v_uint32()
self.LimitFlags = v_uint32()
self.ActiveProcessLimit = v_uint32()
self.Affinity = KAFFINITY_EX()
self.AccessState = v_ptr32()
self.AccessStateQuotaReference = v_ptr32()
self.UIRestrictionsClass = v_uint32()
self.EndOfJobTimeAction = v_uint32()
self.CompletionPort = v_ptr32()
self.CompletionKey = v_ptr32()
self._pad00e0 = v_bytes(size=4)
self.CompletionCount = v_uint64()
self.SessionId = v_uint32()
self.SchedulingClass = v_uint32()
self.ReadOperationCount = v_uint64()
self.WriteOperationCount = v_uint64()
self.OtherOperationCount = v_uint64()
self.ReadTransferCount = v_uint64()
self.WriteTransferCount = v_uint64()
self.OtherTransferCount = v_uint64()
self.DiskIoInfo = PROCESS_DISK_COUNTERS()
self.ProcessMemoryLimit = v_uint32()
self.JobMemoryLimit = v_uint32()
self.PeakProcessMemoryUsed = v_uint32()
self.PeakJobMemoryUsed = v_uint32()
self.EffectiveAffinity = KAFFINITY_EX()
self._pad0168 = v_bytes(size=4)
self.EffectivePerProcessUserTimeLimit = LARGE_INTEGER()
self.EffectiveMinimumWorkingSetSize = v_uint32()
self.EffectiveMaximumWorkingSetSize = v_uint32()
self.EffectiveProcessMemoryLimit = v_uint32()
self.EffectiveProcessMemoryLimitJob = v_ptr32()
self.EffectivePerProcessUserTimeLimitJob = v_ptr32()
self.EffectiveLimitFlags = v_uint32()
self.EffectiveSchedulingClass = v_uint32()
self.EffectiveFreezeCount = v_uint32()
self.EffectiveBackgroundCount = v_uint32()
self.EffectiveSwapCount = v_uint32()
self.EffectiveNotificationLimitCount = v_uint32()
self.EffectivePriorityClass = v_uint8()
self.PriorityClass = v_uint8()
self.Reserved1 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.CompletionFilter = v_uint32()
self._pad01a8 = v_bytes(size=4)
self.WakeChannel = WNF_STATE_NAME()
self._pad01f0 = v_bytes(size=64)
self.WakeFilter = JOBOBJECT_WAKE_FILTER()
self.LowEdgeLatchFilter = v_uint32()
self.OwnedHighEdgeFilters = v_uint32()
self.NotificationLink = v_ptr32()
self._pad0208 = v_bytes(size=4)
self.CurrentJobMemoryUsed = v_uint64()
self.NotificationInfo = v_ptr32()
self.NotificationInfoQuotaReference = v_ptr32()
self.NotificationPacket = v_ptr32()
self.CpuRateControl = v_ptr32()
self.EffectiveSchedulingGroup = v_ptr32()
self.MemoryLimitsLock = EX_PUSH_LOCK()
self.SiblingJobLinks = LIST_ENTRY()
self.ChildJobListHead = LIST_ENTRY()
self.ParentJob = v_ptr32()
self.RootJob = v_ptr32()
self.IteratorListHead = LIST_ENTRY()
self.Accounting = EPROCESS_VALUES()
self.ShadowActiveProcessCount = v_uint32()
self.SequenceNumber = v_uint32()
self.TimerListLock = v_uint32()
self.TimerListHead = LIST_ENTRY()
self.JobFlags = v_uint32()
self.EffectiveHighEdgeFilters = v_uint32()
self._pad02b8 = v_bytes(size=4)
class PROCESSOR_IDLESTATE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TimeCheck = v_uint32()
self.DemotePercent = v_uint8()
self.PromotePercent = v_uint8()
self.Spare = vstruct.VArray([ v_uint8() for i in xrange(2) ])
class DBGKD_READ_WRITE_IO_EXTENDED64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSize = v_uint32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.AddressSpace = v_uint32()
self.IoAddress = v_uint64()
self.DataValue = v_uint32()
self._pad0020 = v_bytes(size=4)
class ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = SINGLE_LIST_ENTRY()
self.Packet = v_ptr32()
self.Lookaside = v_ptr32()
class IO_STATUS_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Status = v_uint32()
self.Information = v_uint32()
class KPROCESSOR_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFrame = CONTEXT()
self.SpecialRegisters = KSPECIAL_REGISTERS()
class KiIoAccessMap(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DirectionMap = vstruct.VArray([ v_uint8() for i in xrange(32) ])
self.IoMap = vstruct.VArray([ v_uint8() for i in xrange(8196) ])
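# KAPC: asynchronous procedure call object queued to a thread's kernel or user APC list.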
class KAPC(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.SpareByte0 = v_uint8()
self.Size = v_uint8()
self.SpareByte1 = v_uint8()
self.SpareLong0 = v_uint32()
self.Thread = v_ptr32()
self.ApcListEntry = LIST_ENTRY()
self.KernelRoutine = v_ptr32()
self.RundownRoutine = v_ptr32()
self.NormalRoutine = v_ptr32()
self.NormalContext = v_ptr32()
self.SystemArgument1 = v_ptr32()
self.SystemArgument2 = v_ptr32()
self.ApcStateIndex = v_uint8()
self.ApcMode = v_uint8()
self.Inserted = v_uint8()
self._pad0030 = v_bytes(size=1)
class ETW_BUFFER_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProcessorNumber = v_uint8()
self.Alignment = v_uint8()
self.LoggerId = v_uint16()
class POOL_TRACKER_BIG_PAGES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Va = v_uint32()
self.Key = v_uint32()
self.PoolType = v_uint32()
self.NumberOfBytes = v_uint32()
class SID_IDENTIFIER_AUTHORITY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = vstruct.VArray([ v_uint8() for i in xrange(6) ])
class RTL_RANGE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = LIST_ENTRY()
self.Flags = v_uint32()
self.Count = v_uint32()
self.Stamp = v_uint32()
class PROC_PERF_HISTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.Slot = v_uint32()
self.UtilityTotal = v_uint32()
self.AffinitizedUtilityTotal = v_uint32()
self.FrequencyTotal = v_uint32()
self.HistoryList = vstruct.VArray([ PROC_PERF_HISTORY_ENTRY() for i in xrange(1) ])
self._pad001c = v_bytes(size=2)
class CM_NOTIFY_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HiveList = LIST_ENTRY()
self.PostList = LIST_ENTRY()
self.KeyControlBlock = v_ptr32()
self.KeyBody = v_ptr32()
self.Filter = v_uint32()
self.SubjectContext = SECURITY_SUBJECT_CONTEXT()
class _unnamed_36425(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeSize = v_uint32()
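# DRIVER_OBJECT: represents a loaded kernel driver; MajorFunction holds the 28 IRP
# dispatch routine pointers (IRP_MJ_MAXIMUM_FUNCTION + 1 entries).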
class DRIVER_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.Flags = v_uint32()
self.DriverStart = v_ptr32()
self.DriverSize = v_uint32()
self.DriverSection = v_ptr32()
self.DriverExtension = v_ptr32()
self.DriverName = UNICODE_STRING()
self.HardwareDatabase = v_ptr32()
self.FastIoDispatch = v_ptr32()
self.DriverInit = v_ptr32()
self.DriverStartIo = v_ptr32()
self.DriverUnload = v_ptr32()
self.MajorFunction = vstruct.VArray([ v_ptr32() for i in xrange(28) ])
class VI_POOL_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PageHeader = VI_POOL_PAGE_HEADER()
self._pad0010 = v_bytes(size=4)
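# POOL_HEADER: header preceding every kernel pool allocation (8 bytes on x86); the bitfield
# contents are flattened here into the two 16-bit size fields.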
class POOL_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PreviousSize = v_uint16()
self.BlockSize = v_uint16()
self.PoolTag = v_uint32()
class SHARED_CACHE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeByteSize = v_uint16()
self.OpenCount = v_uint32()
self.FileSize = LARGE_INTEGER()
self.BcbList = LIST_ENTRY()
self.SectionSize = LARGE_INTEGER()
self.ValidDataLength = LARGE_INTEGER()
self.ValidDataGoal = LARGE_INTEGER()
self.InitialVacbs = vstruct.VArray([ v_ptr32() for i in xrange(4) ])
self.Vacbs = v_ptr32()
self.FileObjectFastRef = EX_FAST_REF()
self.VacbLock = EX_PUSH_LOCK()
self.DirtyPages = v_uint32()
self.LoggedStreamLinks = LIST_ENTRY()
self.SharedCacheMapLinks = LIST_ENTRY()
self.Flags = v_uint32()
self.Status = v_uint32()
self.Mbcb = v_ptr32()
self.Section = v_ptr32()
self.CreateEvent = v_ptr32()
self.WaitOnActiveCount = v_ptr32()
self.PagesToWrite = v_uint32()
self._pad0080 = v_bytes(size=4)
self.BeyondLastFlush = v_uint64()
self.Callbacks = v_ptr32()
self.LazyWriteContext = v_ptr32()
self.PrivateList = LIST_ENTRY()
self.V1 = LOGGED_STREAM_CALLBACK_V1()
self.LargestLSN = LARGE_INTEGER()
self.DirtyPageThreshold = v_uint32()
self.LazyWritePassCount = v_uint32()
self.UninitializeEvent = v_ptr32()
self.BcbLock = FAST_MUTEX()
self._pad00d8 = v_bytes(size=4)
self.LastUnmapBehindOffset = LARGE_INTEGER()
self.Event = KEVENT()
self.HighWaterMappingOffset = LARGE_INTEGER()
self.PrivateCacheMap = PRIVATE_CACHE_MAP()
self.WriteBehindWorkQueueEntry = v_ptr32()
self.VolumeCacheMap = v_ptr32()
self.ProcImagePathHash = v_uint32()
self.WritesInProgress = v_uint32()
class MMPTE_PROTOTYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class REMOTE_PORT_VIEW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.ViewSize = v_uint32()
self.ViewBase = v_ptr32()
class IO_MINI_COMPLETION_PACKET_USER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.PacketType = v_uint32()
self.KeyContext = v_ptr32()
self.ApcContext = v_ptr32()
self.IoStatus = v_uint32()
self.IoStatusInformation = v_uint32()
self.MiniPacketCallback = v_ptr32()
self.Context = v_ptr32()
self.Allocated = v_uint8()
self._pad0028 = v_bytes(size=3)
class XSTATE_FEATURE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.Size = v_uint32()
class GDI_TEB_BATCH32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.HDC = v_uint32()
self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])
class _unnamed_28195(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Argument1 = v_ptr32()
self.Argument2 = v_ptr32()
self.Argument3 = v_ptr32()
self.Argument4 = v_ptr32()
class WHEA_TIMESTAMP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Seconds = v_uint64()
class _unnamed_28190(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProviderId = v_uint32()
self.DataPath = v_ptr32()
self.BufferSize = v_uint32()
self.Buffer = v_ptr32()
class ETW_REF_CLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StartTime = LARGE_INTEGER()
self.StartPerfClock = LARGE_INTEGER()
class RTL_CRITICAL_SECTION_DEBUG(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.CreatorBackTraceIndex = v_uint16()
self.CriticalSection = v_ptr32()
self.ProcessLocksList = LIST_ENTRY()
self.EntryCount = v_uint32()
self.ContentionCount = v_uint32()
self.Flags = v_uint32()
self.CreatorBackTraceIndexHigh = v_uint16()
self.SpareUSHORT = v_uint16()
class PNP_DEVICE_EVENT_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.Argument = v_uint32()
self.CallerEvent = v_ptr32()
self.Callback = v_ptr32()
self.Context = v_ptr32()
self.VetoType = v_ptr32()
self.VetoName = v_ptr32()
self.RefCount = v_uint32()
self.Lock = v_uint32()
self.Cancel = v_uint8()
self._pad002c = v_bytes(size=3)
self.Parent = v_ptr32()
self.Data = PLUGPLAY_EVENT_BLOCK()
class ARBITER_CONFLICT_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OwningObject = v_ptr32()
self._pad0008 = v_bytes(size=4)
self.Start = v_uint64()
self.End = v_uint64()
class KALPC_VIEW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ViewListEntry = LIST_ENTRY()
self.Region = v_ptr32()
self.OwnerPort = v_ptr32()
self.OwnerProcess = v_ptr32()
self.Address = v_ptr32()
self.Size = v_uint32()
self.SecureViewHandle = v_ptr32()
self.WriteAccessHandle = v_ptr32()
self.u1 = _unnamed_30920()
self.NumberOfOwnerMessages = v_uint32()
self.ProcessViewListEntry = LIST_ENTRY()
class ETW_SESSION_PERF_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BufferMemoryPagedPool = v_uint32()
self.BufferMemoryNonPagedPool = v_uint32()
self.EventsLoggedCount = v_uint64()
self.EventsLost = v_uint32()
self.NumConsumers = v_uint32()
class _unnamed_34685(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint32()
self.Length = v_uint32()
self.Reserved = v_uint32()
class DIRTY_PAGE_STATISTICS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DirtyPages = v_uint32()
self.DirtyPagesLastScan = v_uint32()
self.DirtyPagesScheduledLastScan = v_uint32()
class ARBITER_BOOT_ALLOCATION_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ArbitrationList = v_ptr32()
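# TOKEN: security access token (user/group SIDs, privileges, integrity level, restricted
# SIDs) attached to a process or an impersonating thread.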
class TOKEN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TokenSource = TOKEN_SOURCE()
self.TokenId = LUID()
self.AuthenticationId = LUID()
self.ParentTokenId = LUID()
self.ExpirationTime = LARGE_INTEGER()
self.TokenLock = v_ptr32()
self.ModifiedId = LUID()
self._pad0040 = v_bytes(size=4)
self.Privileges = SEP_TOKEN_PRIVILEGES()
self.AuditPolicy = SEP_AUDIT_POLICY()
self._pad0078 = v_bytes(size=2)
self.SessionId = v_uint32()
self.UserAndGroupCount = v_uint32()
self.RestrictedSidCount = v_uint32()
self.VariableLength = v_uint32()
self.DynamicCharged = v_uint32()
self.DynamicAvailable = v_uint32()
self.DefaultOwnerIndex = v_uint32()
self.UserAndGroups = v_ptr32()
self.RestrictedSids = v_ptr32()
self.PrimaryGroup = v_ptr32()
self.DynamicPart = v_ptr32()
self.DefaultDacl = v_ptr32()
self.TokenType = v_uint32()
self.ImpersonationLevel = v_uint32()
self.TokenFlags = v_uint32()
self.TokenInUse = v_uint8()
self._pad00b8 = v_bytes(size=3)
self.IntegrityLevelIndex = v_uint32()
self.MandatoryPolicy = v_uint32()
self.LogonSession = v_ptr32()
self.OriginatingLogonSession = LUID()
self.SidHash = SID_AND_ATTRIBUTES_HASH()
self.RestrictedSidHash = SID_AND_ATTRIBUTES_HASH()
self.pSecurityAttributes = v_ptr32()
self.Package = v_ptr32()
self.Capabilities = v_ptr32()
self.CapabilityCount = v_uint32()
self.CapabilitiesHash = SID_AND_ATTRIBUTES_HASH()
self.LowboxNumberEntry = v_ptr32()
self.LowboxHandlesEntry = v_ptr32()
self.pClaimAttributes = v_ptr32()
self.VariablePart = v_uint32()
self._pad0288 = v_bytes(size=4)
class DISPATCHER_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.TimerControlFlags = v_uint8()
self.ThreadControlFlags = v_uint8()
self.TimerMiscFlags = v_uint8()
self.SignalState = v_uint32()
self.WaitListHead = LIST_ENTRY()
class PROCESSOR_IDLESTATE_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint16()
self.Flags = _unnamed_35941()
self.PolicyCount = v_uint32()
self.Policy = vstruct.VArray([ PROCESSOR_IDLESTATE_INFO() for i in xrange(3) ])
class CM_KEY_BODY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
self.KeyControlBlock = v_ptr32()
self.NotifyBlock = v_ptr32()
self.ProcessID = v_ptr32()
self.KeyBodyList = LIST_ENTRY()
self.Flags = v_uint32()
self.KtmTrans = v_ptr32()
self.KtmUow = v_ptr32()
self.ContextListHead = LIST_ENTRY()
class WHEA_IPF_CMC_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
class _unnamed_34098(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinimumVector = v_uint32()
self.MaximumVector = v_uint32()
self.AffinityPolicy = v_uint16()
self.Group = v_uint16()
self.PriorityPolicy = v_uint32()
self.TargetedProcessors = v_uint32()
class KMUTANT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.MutantListEntry = LIST_ENTRY()
self.OwnerThread = v_ptr32()
self.Abandoned = v_uint8()
self.ApcDisable = v_uint8()
self._pad0020 = v_bytes(size=2)
class ASSEMBLY_STORAGE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class MI_VERIFIER_POOL_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VerifierPoolEntry = v_ptr32()
class _unnamed_36887(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PollInterval = v_uint32()
self.Vector = v_uint32()
self.SwitchToPollingThreshold = v_uint32()
self.SwitchToPollingWindow = v_uint32()
self.ErrorThreshold = v_uint32()
self.ErrorThresholdWindow = v_uint32()
class PROCESSOR_POWER_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdleStates = v_ptr32()
self.IdleAccounting = v_ptr32()
self.PlatformIdleAccounting = v_ptr32()
self._pad0010 = v_bytes(size=4)
self.IdleTimeLast = v_uint64()
self.IdleTimeTotal = v_uint64()
self.IdleTimeEntry = v_uint64()
self.Reserved = v_uint64()
self.IdlePolicy = PROC_IDLE_POLICY()
self._pad0038 = v_bytes(size=3)
self.Synchronization = PPM_IDLE_SYNCHRONIZATION_STATE()
self._pad0040 = v_bytes(size=4)
self.PerfFeedback = PROC_FEEDBACK()
self.Hypervisor = v_uint32()
self.LastSysTime = v_uint32()
self.WmiDispatchPtr = v_uint32()
self.WmiInterfaceEnabled = v_uint32()
self.FFHThrottleStateInfo = PPM_FFH_THROTTLE_STATE_INFO()
self.PerfActionDpc = KDPC()
self.PerfActionMask = v_uint32()
self._pad0100 = v_bytes(size=4)
self.HvIdleCheck = PROC_IDLE_SNAP()
self.PerfCheck = PROC_PERF_SNAP()
self.Domain = v_ptr32()
self.PerfConstraint = v_ptr32()
self.Concurrency = v_ptr32()
self.Load = v_ptr32()
self.PerfHistory = v_ptr32()
self.GuaranteedPerformancePercent = v_uint8()
self.HvTargetState = v_uint8()
self.Parked = v_uint8()
self.OverUtilized = v_uint8()
self.LatestPerformancePercent = v_uint32()
self.AveragePerformancePercent = v_uint32()
self.LatestAffinitizedPercent = v_uint32()
self.Utility = v_uint32()
self.AffinitizedUtility = v_uint32()
self._pad0180 = v_bytes(size=4)
class _unnamed_36885(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PollInterval = v_uint32()
class PS_WAKE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationChannel = v_uint64()
self.WakeCounters = vstruct.VArray([ v_uint64() for i in xrange(8) ])
class SECURITY_CLIENT_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityQos = SECURITY_QUALITY_OF_SERVICE()
self.ClientToken = v_ptr32()
self.DirectlyAccessClientToken = v_uint8()
self.DirectAccessEffectiveOnly = v_uint8()
self.ServerIsRemote = v_uint8()
self._pad0014 = v_bytes(size=1)
self.ClientTokenControl = TOKEN_CONTROL()
class _unnamed_37062(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = MMSECURE_FLAGS()
class SID_AND_ATTRIBUTES_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SidCount = v_uint32()
self.SidAttr = v_ptr32()
self.Hash = vstruct.VArray([ v_uint32() for i in xrange(32) ])
class DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Pad = v_uint16()
self.Limit = v_uint16()
self.Base = v_uint32()
class DBGKD_MANIPULATE_STATE64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ApiNumber = v_uint32()
self.ProcessorLevel = v_uint16()
self.Processor = v_uint16()
self.ReturnStatus = v_uint32()
self._pad0010 = v_bytes(size=4)
self.u = _unnamed_30113()
class _unnamed_27978(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OutputBufferLength = v_uint32()
self.InputBufferLength = v_uint32()
self.FsControlCode = v_uint32()
self.Type3InputBuffer = v_ptr32()
class LPCP_PORT_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NonPagedPortQueue = v_ptr32()
self.Semaphore = v_ptr32()
self.ReceiveHead = LIST_ENTRY()
class PHYSICAL_MEMORY_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NumberOfRuns = v_uint32()
self.NumberOfPages = v_uint32()
self.Run = vstruct.VArray([ PHYSICAL_MEMORY_RUN() for i in xrange(1) ])
class _unnamed_27975(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FsInformationClass = v_uint32()
class MMWSLE_FREE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MustBeZero = v_uint32()
class CACHE_UNINITIALIZE_EVENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Event = KEVENT()
class JOB_ACCESS_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class SECURITY_QUALITY_OF_SERVICE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.ImpersonationLevel = v_uint32()
self.ContextTrackingMode = v_uint8()
self.EffectiveOnly = v_uint8()
self._pad000c = v_bytes(size=2)
class RTL_ATOM_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.ReferenceCount = v_uint32()
self.PushLock = EX_PUSH_LOCK()
self.ExHandleTable = v_ptr32()
self.Flags = v_uint32()
self.NumberOfBuckets = v_uint32()
self.Buckets = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class WHEA_ERROR_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = WHEA_ERROR_RECORD_HEADER()
self.SectionDescriptor = vstruct.VArray([ WHEA_ERROR_RECORD_SECTION_DESCRIPTOR() for i in xrange(1) ])
class CMHIVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Hive = HHIVE()
self.FileHandles = vstruct.VArray([ v_ptr32() for i in xrange(6) ])
self.NotifyList = LIST_ENTRY()
self.HiveList = LIST_ENTRY()
self.PreloadedHiveList = LIST_ENTRY()
self.HiveRundown = EX_RUNDOWN_REF()
self.ParseCacheEntries = LIST_ENTRY()
self.KcbCacheTable = v_ptr32()
self.KcbCacheTableSize = v_uint32()
self.DeletedKcbTable = v_ptr32()
self.DeletedKcbTableSize = v_uint32()
self.Identity = v_uint32()
self.HiveLock = v_ptr32()
self.WriterLock = v_ptr32()
self.FlusherLock = v_ptr32()
self.FlushDirtyVector = RTL_BITMAP()
self.FlushOffsetArray = v_ptr32()
self.FlushOffsetArrayCount = v_uint32()
self.FlushBaseBlock = v_ptr32()
self.FlushHiveTruncated = v_uint32()
self.SecurityLock = EX_PUSH_LOCK()
self.UseCount = v_uint32()
self.LastShrinkHiveSize = v_uint32()
self.ActualFileSize = LARGE_INTEGER()
self.LogFileSizes = vstruct.VArray([ LARGE_INTEGER() for i in xrange(2) ])
self.FileFullPath = UNICODE_STRING()
self.FileUserName = UNICODE_STRING()
self.HiveRootPath = UNICODE_STRING()
self.SecurityCount = v_uint32()
self.SecurityCacheSize = v_uint32()
self.SecurityHitHint = v_uint32()
self.SecurityCache = v_ptr32()
self.SecurityHash = vstruct.VArray([ LIST_ENTRY() for i in xrange(64) ])
self.UnloadEventCount = v_uint32()
self.UnloadEventArray = v_ptr32()
self.RootKcb = v_ptr32()
self.Frozen = v_uint8()
self._pad0670 = v_bytes(size=3)
self.UnloadWorkItem = v_ptr32()
self.UnloadWorkItemHolder = CM_WORKITEM()
self.GrowOnlyMode = v_uint8()
self._pad068c = v_bytes(size=3)
self.GrowOffset = v_uint32()
self.KcbConvertListHead = LIST_ENTRY()
self.KnodeConvertListHead = LIST_ENTRY()
self.CellRemapArray = v_ptr32()
self.Flags = v_uint32()
self.TrustClassEntry = LIST_ENTRY()
self.DirtyTime = v_uint64()
self.CmRm = v_ptr32()
self.CmRmInitFailPoint = v_uint32()
self.CmRmInitFailStatus = v_uint32()
self.CreatorOwner = v_ptr32()
self.RundownThread = v_ptr32()
self.ActiveFlushThread = v_ptr32()
self.FlushBoostLock = EX_PUSH_LOCK()
self._pad06d8 = v_bytes(size=4)
self.LastWriteTime = LARGE_INTEGER()
self.ReferenceCount = v_uint32()
self.FlushFlags = v_uint32()
self.FlushWaitList = v_ptr32()
self.UnloadHistoryIndex = v_uint32()
self.UnloadHistory = vstruct.VArray([ v_uint32() for i in xrange(128) ])
self.BootStart = v_uint32()
self.UnaccessedStart = v_uint32()
self.UnaccessedEnd = v_uint32()
self.LoadedKeyCount = v_uint32()
self.HandleClosePending = v_uint32()
self.HandleClosePendingEvent = EX_PUSH_LOCK()
class POP_SHUTDOWN_BUG_CHECK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InitiatingThread = v_ptr32()
self.InitiatingProcess = v_ptr32()
self.ThreadId = v_ptr32()
self.ProcessId = v_ptr32()
self.Code = v_uint32()
self.Parameter1 = v_uint32()
self.Parameter2 = v_uint32()
self.Parameter3 = v_uint32()
self.Parameter4 = v_uint32()
class SECTION_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StartingVa = v_ptr32()
self.EndingVa = v_ptr32()
self.Parent = v_ptr32()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
self.Segment = v_ptr32()
class PROC_PERF_CONSTRAINT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Prcb = v_ptr32()
self.PerfContext = v_uint32()
self.PlatformCap = v_uint32()
self.ThermalCap = v_uint32()
self.LimitReasons = v_uint32()
self._pad0018 = v_bytes(size=4)
self.PlatformCapStartTime = v_uint64()
self.TargetPercent = v_uint32()
self.DesiredPercent = v_uint32()
self.SelectedPercent = v_uint32()
self.SelectedFrequency = v_uint32()
self.PreviousFrequency = v_uint32()
self.PreviousPercent = v_uint32()
self.LatestFrequencyPercent = v_uint32()
self._pad0040 = v_bytes(size=4)
self.SelectedState = v_uint64()
self.Force = v_uint8()
self._pad0050 = v_bytes(size=7)
class ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatedResources = v_ptr32()
class LUID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
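# Illustrative usage sketch (not part of the generated definitions): assuming
# the usual vstruct API, where VStruct.vsParse(bytes) populates a structure's
# fields from raw little-endian bytes, the LUID structure defined above could
# be decoded as shown below.  The helper name is hypothetical and the function
# is never called at import time.
def _example_parse_luid(raw_bytes):
    # Decode an 8-byte LUID (LowPart followed by HighPart) from raw_bytes.
    luid = LUID()
    luid.vsParse(raw_bytes)
    return (luid.LowPart, luid.HighPart)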
class TOKEN_SOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SourceName = vstruct.VArray([ v_uint8() for i in xrange(8) ])
self.SourceIdentifier = LUID()
class OBJECT_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PointerCount = v_uint32()
self.HandleCount = v_uint32()
self.Lock = EX_PUSH_LOCK()
self.TypeIndex = v_uint8()
self.TraceFlags = v_uint8()
self.InfoMask = v_uint8()
self.Flags = v_uint8()
self.ObjectCreateInfo = v_ptr32()
self.SecurityDescriptor = v_ptr32()
self.Body = QUAD()
class RTL_DYNAMIC_HASH_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Linkage = LIST_ENTRY()
self.Signature = v_uint32()
class MM_PAGED_POOL_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mutex = FAST_MUTEX()
self.PagedPoolAllocationMap = RTL_BITMAP()
self.FirstPteForPagedPool = v_ptr32()
self.PagedPoolHint = v_uint32()
self.AllocatedPagedPool = v_uint32()
class _unnamed_33994(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Generic = _unnamed_34658()
class RTL_TIME_ZONE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bias = v_uint32()
self.StandardName = vstruct.VArray([ v_uint16() for i in xrange(32) ])
self.StandardStart = TIME_FIELDS()
self.StandardBias = v_uint32()
self.DaylightName = vstruct.VArray([ v_uint16() for i in xrange(32) ])
self.DaylightStart = TIME_FIELDS()
self.DaylightBias = v_uint32()
class OBJECT_DUMP_CONTROL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Stream = v_ptr32()
self.Detail = v_uint32()
class CACHE_MANAGER_CALLBACKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AcquireForLazyWrite = v_ptr32()
self.ReleaseFromLazyWrite = v_ptr32()
self.AcquireForReadAhead = v_ptr32()
self.ReleaseFromReadAhead = v_ptr32()
class DBGKD_CONTINUE2(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContinueStatus = v_uint32()
self.ControlSet = X86_DBGKD_CONTROL_SET()
self._pad0020 = v_bytes(size=12)
class _unnamed_35377(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SyncType = v_uint32()
self.PageProtection = v_uint32()
class HANDLE_TRACE_DB_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClientId = CLIENT_ID()
self.Handle = v_ptr32()
self.Type = v_uint32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
class POP_FX_DEVICE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Plugin = v_ptr32()
self.PluginHandle = v_ptr32()
self.MiniPlugin = v_ptr32()
self.MiniPluginHandle = v_ptr32()
self.DevNode = v_ptr32()
self.DeviceObject = v_ptr32()
self.TargetDevice = v_ptr32()
self.Callbacks = POP_FX_DRIVER_CALLBACKS()
self.DriverContext = v_ptr32()
self.RemoveLock = IO_REMOVE_LOCK()
self.WorkOrder = POP_FX_WORK_ORDER()
self.Status = POP_FX_DEVICE_STATUS()
self.PowerReqCall = v_uint32()
self.PowerNotReqCall = v_uint32()
self.IdleLock = v_uint32()
self.IdleTimer = KTIMER()
self.IdleDpc = KDPC()
self.IdleTimeout = v_uint64()
self.IdleStamp = v_uint64()
self.Irp = v_ptr32()
self.IrpData = v_ptr32()
self.NextIrpDeviceObject = v_ptr32()
self.NextIrpPowerState = POWER_STATE()
self.NextIrpCallerCompletion = v_ptr32()
self.NextIrpCallerContext = v_ptr32()
self.IrpCompleteEvent = KEVENT()
self.ComponentCount = v_uint32()
self.Components = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class TOKEN_CONTROL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TokenId = LUID()
self.AuthenticationId = LUID()
self.ModifiedId = LUID()
self.TokenSource = TOKEN_SOURCE()
class GENERAL_LOOKASIDE_POOL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = SLIST_HEADER()
self.Depth = v_uint16()
self.MaximumDepth = v_uint16()
self.TotalAllocates = v_uint32()
self.AllocateMisses = v_uint32()
self.TotalFrees = v_uint32()
self.FreeMisses = v_uint32()
self.Type = v_uint32()
self.Tag = v_uint32()
self.Size = v_uint32()
self.AllocateEx = v_ptr32()
self.FreeEx = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.LastTotalAllocates = v_uint32()
self.LastAllocateMisses = v_uint32()
self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ])
class MM_PAGE_ACCESS_INFO_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.File = _unnamed_32122()
class LDRP_CSLIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Tail = v_ptr32()
class ALPC_DISPATCH_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PortObject = v_ptr32()
self.Message = v_ptr32()
self.CommunicationInfo = v_ptr32()
self.TargetThread = v_ptr32()
self.TargetPort = v_ptr32()
self.Flags = v_uint32()
self.TotalLength = v_uint16()
self.Type = v_uint16()
self.DataInfoOffset = v_uint16()
self.SignalCompletion = v_uint8()
self.PostedToCompletionList = v_uint8()
class XPF_MCE_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MCG_CapabilityRW = v_uint32()
class tagSWITCH_CONTEXT_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.guPlatform = GUID()
self.guMinPlatform = GUID()
self.ulElementCount = v_uint32()
self.ulContextMinimum = v_uint16()
self._pad0028 = v_bytes(size=2)
self.ullOsMaxVersionTested = v_uint64()
self.guElements = vstruct.VArray([ GUID() for i in xrange(1) ])
class LPCP_NONPAGED_PORT_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Semaphore = KSEMAPHORE()
self.BackPointer = v_ptr32()
class KTRANSACTION_HISTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RecordType = v_uint32()
self.Payload = v_uint32()
class RTL_SRWLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locked = v_uint32()
class BATTERY_REPORTING_SCALE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Granularity = v_uint32()
self.Capacity = v_uint32()
class _unnamed_35378(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationType = v_uint32()
self.SafeToRecurse = v_uint8()
self._pad0008 = v_bytes(size=3)
class SHARED_CACHE_MAP_LIST_CURSOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SharedCacheMapLinks = LIST_ENTRY()
self.Flags = v_uint32()
class _unnamed_36349(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Active = v_uint32()
class MSUBSECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlArea = v_ptr32()
self.SubsectionBase = v_ptr32()
self.NextSubsection = v_ptr32()
self.PtesInSubsection = v_uint32()
self.UnusedPtes = v_uint32()
self.u = _unnamed_33507()
self.StartingSector = v_uint32()
self.NumberOfFullSectors = v_uint32()
self.SubsectionNode = MM_AVL_NODE()
self.DereferenceList = LIST_ENTRY()
self.NumberOfMappedViews = v_uint32()
self.NumberOfPfnReferences = v_uint32()
class _unnamed_30922(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WriteAccess = v_uint32()
class MMVAD_FLAGS1(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CommitCharge = v_uint32()
class TP_POOL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class XPF_MC_BANK_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClearOnInitializationRW = v_uint8()
class _unnamed_28084(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Capabilities = v_ptr32()
class _unnamed_28872(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VirtualAddress = v_ptr32()
class CMP_OFFSET_ARRAY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileOffset = v_uint32()
self.DataBuffer = v_ptr32()
self.DataLength = v_uint32()
class MMINPAGE_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InjectRetry = v_uint8()
self.PrefetchSystemVmType = v_uint8()
self.PagePriority = v_uint8()
self.ZeroLastPage = v_uint8()
class KALPC_MESSAGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = LIST_ENTRY()
self.PortQueue = v_ptr32()
self.OwnerPort = v_ptr32()
self.WaitingThread = v_ptr32()
self.u1 = _unnamed_31033()
self.SequenceNo = v_uint32()
self.QuotaProcess = v_ptr32()
self.CancelSequencePort = v_ptr32()
self.CancelQueuePort = v_ptr32()
self.CancelSequenceNo = v_uint32()
self.CancelListEntry = LIST_ENTRY()
self.Reserve = v_ptr32()
self.MessageAttributes = KALPC_MESSAGE_ATTRIBUTES()
self.DataUserVa = v_ptr32()
self.DataSystemVa = v_ptr32()
self.CommunicationInfo = v_ptr32()
self.ConnectionPort = v_ptr32()
self.ServerThread = v_ptr32()
self.WakeReference = v_ptr32()
self.ExtensionBuffer = v_ptr32()
self.ExtensionBufferSize = v_uint32()
self._pad0078 = v_bytes(size=4)
self.PortMessage = PORT_MESSAGE()
class MMVAD_FLAGS2(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileOffset = v_uint32()
class _unnamed_35925(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = LIST_ENTRY()
class _unnamed_35924(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserData = v_ptr32()
self.Owner = v_ptr32()
class LIST_ENTRY32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint32()
self.Blink = v_uint32()
class _unnamed_28120(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceTextType = v_uint32()
self.LocaleId = v_uint32()
class WHEA_IPF_MCA_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
class SINGLE_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
class DBGKD_QUERY_MEMORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_uint64()
self.Reserved = v_uint64()
self.AddressSpace = v_uint32()
self.Flags = v_uint32()
class MMVAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Core = MMVAD_SHORT()
self.u2 = _unnamed_35666()
self.Subsection = v_ptr32()
self.FirstPrototypePte = v_ptr32()
self.LastContiguousPte = v_ptr32()
self.ViewLinks = LIST_ENTRY()
self.VadsProcess = v_ptr32()
self.u4 = _unnamed_35669()
class VF_AVL_TREE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeRangeSize = v_uint32()
self.NodeCount = v_uint32()
self.Tables = v_ptr32()
self.TablesNo = v_uint32()
self.u1 = _unnamed_36425()
class VF_POOL_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_ptr32()
self.Size = v_uint32()
self.Thread = v_ptr32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(13) ])
class KDEVICE_QUEUE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceListEntry = LIST_ENTRY()
self.SortKey = v_uint32()
self.Inserted = v_uint8()
self._pad0010 = v_bytes(size=3)
class MMPTE_SUBSECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class PO_DEVICE_NOTIFY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.PowerChildren = LIST_ENTRY()
self.PowerParents = LIST_ENTRY()
self.TargetDevice = v_ptr32()
self.OrderLevel = v_uint8()
self._pad0020 = v_bytes(size=3)
self.DeviceObject = v_ptr32()
self.DeviceName = v_ptr32()
self.DriverName = v_ptr32()
self.ChildCount = v_uint32()
self.ActiveChild = v_uint32()
self.ParentCount = v_uint32()
self.ActiveParent = v_uint32()
class ALPC_HANDLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Object = v_ptr32()
class DIRTY_PAGE_THRESHOLDS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DirtyPageThreshold = v_uint32()
self.DirtyPageThresholdTop = v_uint32()
self.DirtyPageThresholdBottom = v_uint32()
self.DirtyPageTarget = v_uint32()
class EXCEPTION_RECORD32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_uint32()
self.ExceptionAddress = v_uint32()
self.NumberParameters = v_uint32()
self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])
class VI_FAULT_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Thread = v_ptr32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
class _unnamed_34192(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint32()
class KTMOBJECT_NAMESPACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Table = RTL_AVL_TABLE()
self.Mutex = KMUTANT()
self.LinksOffset = v_uint16()
self.GuidOffset = v_uint16()
self.Expired = v_uint8()
self._pad0060 = v_bytes(size=3)
class OBJECT_HEADER_QUOTA_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PagedPoolCharge = v_uint32()
self.NonPagedPoolCharge = v_uint32()
self.SecurityDescriptorCharge = v_uint32()
self.SecurityDescriptorQuotaBlock = v_ptr32()
class _unnamed_28990(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageCommitment = v_uint32()
class _unnamed_28991(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageInformation = v_ptr32()
class DBGKD_READ_MEMORY32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TargetBaseAddress = v_uint32()
self.TransferCount = v_uint32()
self.ActualBytesRead = v_uint32()
class MI_CACHED_PTE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GlobalTimeStamp = v_uint32()
self.PteIndex = v_uint32()
class _unnamed_34464(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsUCHAR = v_uint8()
class ARBITER_ALTERNATIVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Minimum = v_uint64()
self.Maximum = v_uint64()
self.Length = v_uint64()
self.Alignment = v_uint64()
self.Priority = v_uint32()
self.Flags = v_uint32()
self.Descriptor = v_ptr32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
class HEAP_LOOKASIDE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = SLIST_HEADER()
self.Depth = v_uint16()
self.MaximumDepth = v_uint16()
self.TotalAllocates = v_uint32()
self.AllocateMisses = v_uint32()
self.TotalFrees = v_uint32()
self.FreeMisses = v_uint32()
self.LastTotalAllocates = v_uint32()
self.LastAllocateMisses = v_uint32()
self.Counters = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self._pad0030 = v_bytes(size=4)
class WHEA_PCI_SLOT_NUMBER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u = _unnamed_34980()
class EX_FAST_REF(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Object = v_ptr32()
class HMAP_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Table = vstruct.VArray([ HMAP_ENTRY() for i in xrange(512) ])
class PNP_RESOURCE_REQUEST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PhysicalDevice = v_ptr32()
self.Flags = v_uint32()
self.AllocationType = v_uint32()
self.Priority = v_uint32()
self.Position = v_uint32()
self.ResourceRequirements = v_ptr32()
self.ReqList = v_ptr32()
self.ResourceAssignment = v_ptr32()
self.TranslatedResourceAssignment = v_ptr32()
self.Status = v_uint32()
class RTL_ACTIVATION_CONTEXT_STACK_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Previous = v_ptr32()
self.ActivationContext = v_ptr32()
self.Flags = v_uint32()
class VI_DEADLOCK_GLOBALS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TimeAcquire = v_uint64()
self.TimeRelease = v_uint64()
self.ResourceDatabase = v_ptr32()
self.ResourceDatabaseCount = v_uint32()
self.ResourceAddressRange = vstruct.VArray([ VF_ADDRESS_RANGE() for i in xrange(1023) ])
self.ThreadDatabase = v_ptr32()
self.ThreadDatabaseCount = v_uint32()
self.ThreadAddressRange = vstruct.VArray([ VF_ADDRESS_RANGE() for i in xrange(1023) ])
self.AllocationFailures = v_uint32()
self.NodesTrimmedBasedOnAge = v_uint32()
self.NodesTrimmedBasedOnCount = v_uint32()
self.NodesSearched = v_uint32()
self.MaxNodesSearched = v_uint32()
self.SequenceNumber = v_uint32()
self.RecursionDepthLimit = v_uint32()
self.SearchedNodesLimit = v_uint32()
self.DepthLimitHits = v_uint32()
self.SearchLimitHits = v_uint32()
self.ABC_ACB_Skipped = v_uint32()
self.OutOfOrderReleases = v_uint32()
self.NodesReleasedOutOfOrder = v_uint32()
self.TotalReleases = v_uint32()
self.RootNodesDeleted = v_uint32()
self.ForgetHistoryCounter = v_uint32()
self.Instigator = v_ptr32()
self.NumberOfParticipants = v_uint32()
self.Participant = vstruct.VArray([ v_ptr32() for i in xrange(32) ])
self.ChildrenCountWatermark = v_uint32()
self._pad40e0 = v_bytes(size=4)
class FS_FILTER_CALLBACKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFsFilterCallbacks = v_uint32()
self.Reserved = v_uint32()
self.PreAcquireForSectionSynchronization = v_ptr32()
self.PostAcquireForSectionSynchronization = v_ptr32()
self.PreReleaseForSectionSynchronization = v_ptr32()
self.PostReleaseForSectionSynchronization = v_ptr32()
self.PreAcquireForCcFlush = v_ptr32()
self.PostAcquireForCcFlush = v_ptr32()
self.PreReleaseForCcFlush = v_ptr32()
self.PostReleaseForCcFlush = v_ptr32()
self.PreAcquireForModifiedPageWriter = v_ptr32()
self.PostAcquireForModifiedPageWriter = v_ptr32()
self.PreReleaseForModifiedPageWriter = v_ptr32()
self.PostReleaseForModifiedPageWriter = v_ptr32()
class LDR_DDAG_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Modules = LIST_ENTRY()
self.ServiceTagList = v_ptr32()
self.LoadCount = v_uint32()
self.ReferenceCount = v_uint32()
self.DependencyCount = v_uint32()
self.Dependencies = LDRP_CSLIST()
self.IncomingDependencies = LDRP_CSLIST()
self.State = v_uint32()
self.CondenseLink = SINGLE_LIST_ENTRY()
self.PreorderNumber = v_uint32()
self.LowestLink = v_uint32()
class _unnamed_28013(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Srb = v_ptr32()
class PROC_FEEDBACK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self._pad0008 = v_bytes(size=4)
self.CyclesLast = v_uint64()
self.CyclesActive = v_uint64()
self.Counters = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.LastUpdateTime = v_uint64()
self.UnscaledTime = v_uint64()
self.UnaccountedTime = v_uint64()
self.ScaledTime = vstruct.VArray([ v_uint64() for i in xrange(2) ])
self.UnaccountedKernelTime = v_uint64()
self.PerformanceScaledKernelTime = v_uint64()
self.UserTimeLast = v_uint32()
self.KernelTimeLast = v_uint32()
self.KernelTimesIndex = v_uint8()
self._pad0068 = v_bytes(size=7)
class ETW_PMC_SUPPORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Source = vstruct.VArray([ KPROFILE_SOURCE() for i in xrange(4) ])
self.HookIdCount = v_uint32()
self.HookId = vstruct.VArray([ v_uint16() for i in xrange(4) ])
self.CountersCount = v_uint32()
self.ProcessorCtrs = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class DBGKD_READ_WRITE_IO64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IoAddress = v_uint64()
self.DataSize = v_uint32()
self.DataValue = v_uint32()
class KENTROPY_TIMING_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EntropyCount = v_uint32()
self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(64) ])
self.Dpc = KDPC()
self.LastDeliveredBuffer = v_uint32()
class HEAP_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Flags = v_uint8()
self.SmallTagIndex = v_uint8()
self.PreviousSize = v_uint16()
self.SegmentOffset = v_uint8()
self.UnusedBytes = v_uint8()
class WHEA_GENERIC_ERROR_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Reserved = v_uint8()
self.Enabled = v_uint8()
self.ErrStatusBlockLength = v_uint32()
self.RelatedErrorSourceId = v_uint32()
self.ErrStatusAddressSpaceID = v_uint8()
self.ErrStatusAddressBitWidth = v_uint8()
self.ErrStatusAddressBitOffset = v_uint8()
self.ErrStatusAddressAccessSize = v_uint8()
self.ErrStatusAddress = LARGE_INTEGER()
self.Notify = WHEA_NOTIFICATION_DESCRIPTOR()
class TIME_FIELDS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Year = v_uint16()
self.Month = v_uint16()
self.Day = v_uint16()
self.Hour = v_uint16()
self.Minute = v_uint16()
self.Second = v_uint16()
self.Milliseconds = v_uint16()
self.Weekday = v_uint16()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FRUId = v_uint8()
class _unnamed_27515(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self._pad0028 = v_bytes(size=32)
class IMAGE_OPTIONAL_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint16()
self.MajorLinkerVersion = v_uint8()
self.MinorLinkerVersion = v_uint8()
self.SizeOfCode = v_uint32()
self.SizeOfInitializedData = v_uint32()
self.SizeOfUninitializedData = v_uint32()
self.AddressOfEntryPoint = v_uint32()
self.BaseOfCode = v_uint32()
self.BaseOfData = v_uint32()
self.ImageBase = v_uint32()
self.SectionAlignment = v_uint32()
self.FileAlignment = v_uint32()
self.MajorOperatingSystemVersion = v_uint16()
self.MinorOperatingSystemVersion = v_uint16()
self.MajorImageVersion = v_uint16()
self.MinorImageVersion = v_uint16()
self.MajorSubsystemVersion = v_uint16()
self.MinorSubsystemVersion = v_uint16()
self.Win32VersionValue = v_uint32()
self.SizeOfImage = v_uint32()
self.SizeOfHeaders = v_uint32()
self.CheckSum = v_uint32()
self.Subsystem = v_uint16()
self.DllCharacteristics = v_uint16()
self.SizeOfStackReserve = v_uint32()
self.SizeOfStackCommit = v_uint32()
self.SizeOfHeapReserve = v_uint32()
self.SizeOfHeapCommit = v_uint32()
self.LoaderFlags = v_uint32()
self.NumberOfRvaAndSizes = v_uint32()
self.DataDirectory = vstruct.VArray([ IMAGE_DATA_DIRECTORY() for i in xrange(16) ])
class SCSI_REQUEST_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class ARBITER_ADD_RESERVED_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReserveDevice = v_ptr32()
class VF_ADDRESS_RANGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_ptr32()
self.End = v_ptr32()
class STRING64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self._pad0008 = v_bytes(size=4)
self.Buffer = v_uint64()
class JOB_NOTIFICATION_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class WHEAP_WORK_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = LIST_ENTRY()
self.ListLock = v_uint32()
self.ItemCount = v_uint32()
self.Dpc = KDPC()
self.WorkItem = WORK_QUEUE_ITEM()
self.WorkRoutine = v_ptr32()
class _unnamed_28809(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PteFrame = v_uint32()
class FAST_MUTEX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.Owner = v_ptr32()
self.Contention = v_uint32()
self.Event = KEVENT()
self.OldIrql = v_uint32()
class AER_BRIDGE_DESCRIPTOR_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UncorrectableErrorMaskRW = v_uint16()
class MM_AVL_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BalancedRoot = MM_AVL_NODE()
self.DepthOfTree = v_uint32()
self.NodeHint = v_ptr32()
self.NodeFreeHint = v_ptr32()
class MM_SESSION_SPACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReferenceCount = v_uint32()
self.u = _unnamed_35601()
self.SessionId = v_uint32()
self.ProcessReferenceToSession = v_uint32()
self.ProcessList = LIST_ENTRY()
self.SessionPageDirectoryIndex = v_uint32()
self.NonPagablePages = v_uint32()
self.CommittedPages = v_uint32()
self.PagedPoolStart = v_ptr32()
self.PagedPoolEnd = v_ptr32()
self.SessionObject = v_ptr32()
self.SessionObjectHandle = v_ptr32()
self.SessionPoolAllocationFailures = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.ImageList = LIST_ENTRY()
self.LocaleId = v_uint32()
self.AttachCount = v_uint32()
self.AttachGate = KGATE()
self.WsListEntry = LIST_ENTRY()
self._pad0080 = v_bytes(size=20)
self.Lookaside = vstruct.VArray([ GENERAL_LOOKASIDE() for i in xrange(24) ])
self.Session = MMSESSION()
self.PagedPoolInfo = MM_PAGED_POOL_INFO()
self.Vm = MMSUPPORT()
self.Wsle = v_ptr32()
self.DriverUnload = MI_SESSION_DRIVER_UNLOAD()
self._pad0d80 = v_bytes(size=28)
self.PagedPool = POOL_DESCRIPTOR()
self.PageTables = v_ptr32()
self._pad1ec8 = v_bytes(size=4)
self.SpecialPool = MI_SPECIAL_POOL()
self.SessionPteLock = FAST_MUTEX()
self.PoolBigEntriesInUse = v_uint32()
self.PagedPoolPdeCount = v_uint32()
self.SpecialPoolPdeCount = v_uint32()
self.DynamicSessionPdeCount = v_uint32()
self.SystemPteInfo = MI_SYSTEM_PTE_TYPE()
self.PoolTrackTableExpansion = v_ptr32()
self.PoolTrackTableExpansionSize = v_uint32()
self.PoolTrackBigPages = v_ptr32()
self.PoolTrackBigPagesSize = v_uint32()
self.IoState = v_uint32()
self.IoStateSequence = v_uint32()
self.IoNotificationEvent = KEVENT()
self.SessionPoolPdes = RTL_BITMAP()
self._pad1fc0 = v_bytes(size=28)
class WHEA_ERROR_RECORD_HEADER_VALIDBITS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PlatformId = v_uint32()
class CM_NAME_CONTROL_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Compressed = v_uint32()
self.NameHash = CM_NAME_HASH()
class _unnamed_27770(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Create = _unnamed_27833()
class KDEVICE_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceListHead = LIST_ENTRY()
self.Lock = v_uint32()
self.Busy = v_uint8()
self._pad0014 = v_bytes(size=3)
class ARBITER_RETEST_ALLOCATION_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ArbitrationList = v_ptr32()
self.AllocateFromCount = v_uint32()
self.AllocateFrom = v_ptr32()
class NT_TIB32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionList = v_uint32()
self.StackBase = v_uint32()
self.StackLimit = v_uint32()
self.SubSystemTib = v_uint32()
self.FiberData = v_uint32()
self.ArbitraryUserPointer = v_uint32()
self.Self = v_uint32()
class ALPC_COMPLETION_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = LIST_ENTRY()
self.OwnerProcess = v_ptr32()
self.CompletionListLock = EX_PUSH_LOCK()
self.Mdl = v_ptr32()
self.UserVa = v_ptr32()
self.UserLimit = v_ptr32()
self.DataUserVa = v_ptr32()
self.SystemVa = v_ptr32()
self.TotalSize = v_uint32()
self.Header = v_ptr32()
self.List = v_ptr32()
self.ListSize = v_uint32()
self.Bitmap = v_ptr32()
self.BitmapSize = v_uint32()
self.Data = v_ptr32()
self.DataSize = v_uint32()
self.BitmapLimit = v_uint32()
self.BitmapNextHint = v_uint32()
self.ConcurrencyCount = v_uint32()
self.AttributeFlags = v_uint32()
self.AttributeSize = v_uint32()
class _unnamed_34392(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Notification = v_ptr32()
class PORT_MESSAGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_30748()
self.u2 = _unnamed_30749()
self.ClientId = CLIENT_ID()
self.MessageId = v_uint32()
self.ClientViewSize = v_uint32()
class RELATIVE_SYMLINK_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExposedNamespaceLength = v_uint16()
self.Flags = v_uint16()
self.DeviceNameLength = v_uint16()
self.Reserved = v_uint16()
self.InteriorMountPoint = v_ptr32()
self.OpenedName = UNICODE_STRING()
class MI_VAD_EVENT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.WaitReason = v_uint32()
self.Gate = KGATE()
class IO_SECURITY_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityQos = v_ptr32()
self.AccessState = v_ptr32()
self.DesiredAccess = v_uint32()
self.FullCreateOptions = v_uint32()
class TERMINATION_PORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Port = v_ptr32()
class VF_AVL_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RtlTable = RTL_AVL_TABLE()
self.ReservedNode = v_ptr32()
self.NodeToFree = v_ptr32()
self.Lock = v_uint32()
self._pad0080 = v_bytes(size=60)
class POP_FX_DEVICE_STATUS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_uint32()
class IO_CLIENT_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextExtension = v_ptr32()
self.ClientIdentificationAddress = v_ptr32()
class INITIAL_PRIVILEGE_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegeCount = v_uint32()
self.Control = v_uint32()
self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(3) ])
class OBJECT_REF_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ObjectHeader = v_ptr32()
self.NextRef = v_ptr32()
self.ImageFileName = vstruct.VArray([ v_uint8() for i in xrange(16) ])
self.NextPos = v_uint16()
self.MaxStacks = v_uint16()
self.StackInfo = vstruct.VArray([ OBJECT_REF_STACK_INFO() for i in xrange(0) ])
class GENERAL_LOOKASIDE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = SLIST_HEADER()
self.Depth = v_uint16()
self.MaximumDepth = v_uint16()
self.TotalAllocates = v_uint32()
self.AllocateMisses = v_uint32()
self.TotalFrees = v_uint32()
self.FreeMisses = v_uint32()
self.Type = v_uint32()
self.Tag = v_uint32()
self.Size = v_uint32()
self.AllocateEx = v_ptr32()
self.FreeEx = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.LastTotalAllocates = v_uint32()
self.LastAllocateMisses = v_uint32()
self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self._pad0080 = v_bytes(size=56)
class POP_PER_PROCESSOR_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UncompressedData = v_ptr32()
self.MappingVa = v_ptr32()
self.XpressEncodeWorkspace = v_ptr32()
self.CompressedDataBuffer = v_ptr32()
self.CopyTicks = v_uint64()
self.CompressTicks = v_uint64()
self.BytesCopied = v_uint64()
self.PagesProcessed = v_uint64()
self.DecompressTicks = v_uint64()
self.ResumeCopyTicks = v_uint64()
self.SharedBufferTicks = v_uint64()
self.DecompressTicksByMethod = vstruct.VArray([ v_uint64() for i in xrange(2) ])
self.DecompressSizeByMethod = vstruct.VArray([ v_uint64() for i in xrange(2) ])
self.CompressCount = v_uint32()
self.HuffCompressCount = v_uint32()
class DBGKD_QUERY_SPECIAL_CALLS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NumberOfSpecialCalls = v_uint32()
class WHEA_ERROR_RECORD_HEADER_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Recovered = v_uint32()
class KTIMER_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self.Entry = LIST_ENTRY()
self._pad0010 = v_bytes(size=4)
self.Time = ULARGE_INTEGER()
class HMAP_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BlockAddress = v_uint32()
self.BinAddress = v_uint32()
self.MemAlloc = v_uint32()
class DUMP_STACK_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Init = DUMP_INITIALIZATION_CONTEXT()
self.PartitionOffset = LARGE_INTEGER()
self.DumpPointers = v_ptr32()
self.PointersLength = v_uint32()
self.ModulePrefix = v_ptr32()
self.DriverList = LIST_ENTRY()
self.InitMsg = STRING()
self.ProgMsg = STRING()
self.DoneMsg = STRING()
self.FileObject = v_ptr32()
self.UsageType = v_uint32()
self._pad0100 = v_bytes(size=4)
class PNP_DEVICE_EVENT_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Status = v_uint32()
self.EventQueueMutex = KMUTANT()
self.Lock = FAST_MUTEX()
self.List = LIST_ENTRY()
class VF_BTS_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.JumpedFrom = v_ptr32()
self.JumpedTo = v_ptr32()
self.Unused1 = v_uint32()
class KWAIT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitListEntry = LIST_ENTRY()
self.WaitType = v_uint8()
self.BlockState = v_uint8()
self.WaitKey = v_uint16()
self.Thread = v_ptr32()
self.Object = v_ptr32()
self.SparePtr = v_ptr32()
class DBGKD_READ_WRITE_IO32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSize = v_uint32()
self.IoAddress = v_uint32()
self.DataValue = v_uint32()
class POP_HIBER_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reset = v_uint8()
self.HiberFlags = v_uint8()
self.WroteHiberFile = v_uint8()
self.VerifyKernelPhaseOnResume = v_uint8()
self.KernelPhaseVerificationActive = v_uint8()
self.InitializationFinished = v_uint8()
self._pad0008 = v_bytes(size=2)
self.NextTableLockHeld = v_uint32()
self.BootPhaseFinishedBarrier = v_uint32()
self.KernelResumeFinishedBarrier = v_uint32()
self.MapFrozen = v_uint8()
self._pad0018 = v_bytes(size=3)
self.DiscardMap = RTL_BITMAP()
self.BootPhaseMap = RTL_BITMAP()
self.ClonedRanges = LIST_ENTRY()
self.ClonedRangeCount = v_uint32()
self._pad0038 = v_bytes(size=4)
self.ClonedPageCount = v_uint64()
self.CurrentMap = v_ptr32()
self.NextCloneRange = v_ptr32()
self.NextPreserve = v_uint32()
self.LoaderMdl = v_ptr32()
self.AllocatedMdl = v_ptr32()
self._pad0058 = v_bytes(size=4)
self.PagesOut = v_uint64()
self.IoPages = v_ptr32()
self.IoPagesCount = v_uint32()
self.CurrentMcb = v_ptr32()
self.DumpStack = v_ptr32()
self.WakeState = v_ptr32()
self.IoProgress = v_uint32()
self.Status = v_uint32()
self.GraphicsProc = v_uint32()
self.MemoryImage = v_ptr32()
self.PerformanceStats = v_ptr32()
self.BootLoaderLogMdl = v_ptr32()
self.SiLogOffset = v_uint32()
self.FirmwareRuntimeInformationMdl = v_ptr32()
self.FirmwareRuntimeInformationVa = v_ptr32()
self.ResumeContext = v_ptr32()
self.ResumeContextPages = v_uint32()
self.ProcessorCount = v_uint32()
self.ProcessorContext = v_ptr32()
self.ProdConsBuffer = v_ptr32()
self.ProdConsSize = v_uint32()
self.MaxDataPages = v_uint32()
self.ExtraBuffer = v_ptr32()
self.ExtraBufferSize = v_uint32()
self.ExtraMapVa = v_ptr32()
self.BitlockerKeyPFN = v_uint32()
self._pad00c8 = v_bytes(size=4)
self.IoInfo = POP_IO_INFO()
self.HardwareConfigurationSignature = v_uint32()
self._pad0120 = v_bytes(size=4)
class _unnamed_33210(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceClass = _unnamed_34384()
self._pad0020 = v_bytes(size=12)
class ARMCE_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Continue = v_uint32()
self.CurrentSymbolStart = v_uint32()
self.CurrentSymbolEnd = v_uint32()
class ACTIVATION_CONTEXT_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class MMPTE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint64()
class RTL_BALANCED_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Children = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.Red = v_uint8()
self._pad000c = v_bytes(size=3)
class FILE_NETWORK_OPEN_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreationTime = LARGE_INTEGER()
self.LastAccessTime = LARGE_INTEGER()
self.LastWriteTime = LARGE_INTEGER()
self.ChangeTime = LARGE_INTEGER()
self.AllocationSize = LARGE_INTEGER()
self.EndOfFile = LARGE_INTEGER()
self.FileAttributes = v_uint32()
self._pad0038 = v_bytes(size=4)
class PROCESSOR_NUMBER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Group = v_uint16()
self.Number = v_uint8()
self.Reserved = v_uint8()
class RTL_DRIVE_LETTER_CURDIR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint16()
self.Length = v_uint16()
self.TimeStamp = v_uint32()
self.DosPath = STRING()
class WHEAP_ERROR_RECORD_WRAPPER_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Preallocated = v_uint32()
class VF_TRACKER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TrackerFlags = v_uint32()
self.TrackerSize = v_uint32()
self.TrackerIndex = v_uint32()
self.TraceDepth = v_uint32()
class CACHE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint8()
self.Associativity = v_uint8()
self.LineSize = v_uint16()
self.Size = v_uint32()
self.Type = v_uint32()
class VF_BTS_DATA_MANAGEMENT_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BTSBufferBase = v_ptr32()
self.BTSIndex = v_ptr32()
self.BTSMax = v_ptr32()
self.BTSInterruptThreshold = v_ptr32()
self.PEBSBufferBase = v_ptr32()
self.PEBSIndex = v_ptr32()
self.PEBSMax = v_ptr32()
self.PEBSInterruptThreshold = v_ptr32()
self.PEBSCounterReset = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(12) ])
class ARBITER_QUERY_ARBITRATE_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ArbitrationList = v_ptr32()
class _unnamed_34134(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length64 = v_uint32()
self.Alignment64 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class TXN_PARAMETER_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.TxFsContext = v_uint16()
self.TransactionObject = v_ptr32()
class ULARGE_INTEGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class TEB_ACTIVE_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.Previous = v_ptr32()
self.Context = v_ptr32()
class ETIMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KeTimer = KTIMER()
self.Lock = v_uint32()
self.TimerApc = KAPC()
self.TimerDpc = KDPC()
self.ActiveTimerListEntry = LIST_ENTRY()
self.Period = v_uint32()
self.TimerFlags = v_uint8()
self.DueTimeType = v_uint8()
self.Spare2 = v_uint16()
self.WakeReason = v_ptr32()
self.WakeTimerListEntry = LIST_ENTRY()
self.VirtualizedTimerCookie = v_ptr32()
self.VirtualizedTimerLinks = LIST_ENTRY()
self._pad00a8 = v_bytes(size=4)
self.DueTime = v_uint64()
self.CoalescingWindow = v_uint32()
self._pad00b8 = v_bytes(size=4)
class DBGKD_LOAD_SYMBOLS64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PathNameLength = v_uint32()
self._pad0008 = v_bytes(size=4)
self.BaseOfDll = v_uint64()
self.ProcessId = v_uint64()
self.CheckSum = v_uint32()
self.SizeOfImage = v_uint32()
self.UnloadSymbols = v_uint8()
self._pad0028 = v_bytes(size=7)
class _unnamed_34139(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Class = v_uint8()
self.Type = v_uint8()
self.Reserved1 = v_uint8()
self.Reserved2 = v_uint8()
self.IdLowPart = v_uint32()
self.IdHighPart = v_uint32()
class _unnamed_28834(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReferenceCount = v_uint16()
self.ShortFlags = v_uint16()
class FREE_DISPLAY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RealVectorSize = v_uint32()
self.Hint = v_uint32()
self.Display = RTL_BITMAP()
class _unnamed_30210(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReadMemory = DBGKD_READ_MEMORY32()
self._pad0028 = v_bytes(size=28)
class POP_FX_COMPONENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Id = GUID()
self.Index = v_uint32()
self.WorkOrder = POP_FX_WORK_ORDER()
self.Device = v_ptr32()
self.Flags = POP_FX_COMPONENT_FLAGS()
self.Resident = v_uint32()
self.ActiveEvent = KEVENT()
self.IdleLock = v_uint32()
self.IdleConditionComplete = v_uint32()
self.IdleStateComplete = v_uint32()
self._pad0058 = v_bytes(size=4)
self.IdleStamp = v_uint64()
self.CurrentIdleState = v_uint32()
self.IdleStateCount = v_uint32()
self.IdleStates = v_ptr32()
self.DeepestWakeableIdleState = v_uint32()
self.ProviderCount = v_uint32()
self.Providers = v_ptr32()
self.IdleProviderCount = v_uint32()
self.DependentCount = v_uint32()
self.Dependents = v_ptr32()
self._pad0088 = v_bytes(size=4)
class MM_PAGE_ACCESS_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = MM_PAGE_ACCESS_INFO_FLAGS()
self.PointerProtoPte = v_ptr32()
class ARBITER_ORDERING_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint16()
self.Maximum = v_uint16()
self.Orderings = v_ptr32()
class _unnamed_25455(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class OBJECT_DIRECTORY_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ChainLink = v_ptr32()
self.Object = v_ptr32()
self.HashValue = v_uint32()
class DEVICE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.ReferenceCount = v_uint32()
self.DriverObject = v_ptr32()
self.NextDevice = v_ptr32()
self.AttachedDevice = v_ptr32()
self.CurrentIrp = v_ptr32()
self.Timer = v_ptr32()
self.Flags = v_uint32()
self.Characteristics = v_uint32()
self.Vpb = v_ptr32()
self.DeviceExtension = v_ptr32()
self.DeviceType = v_uint32()
self.StackSize = v_uint8()
self._pad0034 = v_bytes(size=3)
self.Queue = _unnamed_27515()
self.AlignmentRequirement = v_uint32()
self.DeviceQueue = KDEVICE_QUEUE()
self.Dpc = KDPC()
self.ActiveThreadCount = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.DeviceLock = KEVENT()
self.SectorSize = v_uint16()
self.Spare1 = v_uint16()
self.DeviceObjectExtension = v_ptr32()
self.Reserved = v_ptr32()
class CM_KEY_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConvKey = v_uint32()
self.NextHash = v_ptr32()
self.KeyHive = v_ptr32()
self.KeyCell = v_uint32()
class PPM_CONCURRENCY_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self.Processors = v_uint32()
self.ActiveProcessors = v_uint32()
self._pad0010 = v_bytes(size=4)
self.LastUpdateTime = v_uint64()
self.TotalTime = v_uint64()
self.AccumulatedTime = vstruct.VArray([ v_uint64() for i in xrange(1) ])
class KTMNOTIFICATION_PACKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class ARBITER_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.AlternativeCount = v_uint32()
self.Alternatives = v_ptr32()
self.PhysicalDeviceObject = v_ptr32()
self.RequestSource = v_uint32()
self.Flags = v_uint32()
self.WorkSpace = v_uint32()
self.InterfaceType = v_uint32()
self.SlotNumber = v_uint32()
self.BusNumber = v_uint32()
self.Assignment = v_ptr32()
self.SelectedAlternative = v_ptr32()
self.Result = v_uint32()
class _unnamed_30634(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CriticalSection = RTL_CRITICAL_SECTION()
self._pad0038 = v_bytes(size=32)
class _unnamed_33507(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags = v_uint32()
class PROC_PERF_HISTORY_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Utility = v_uint16()
self.AffinitizedUtility = v_uint16()
self.Frequency = v_uint8()
self.Reserved = v_uint8()
class KGDTENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LimitLow = v_uint16()
self.BaseLow = v_uint16()
self.HighWord = _unnamed_26252()
class MMPFNENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PageLocation = v_uint8()
self.Priority = v_uint8()
class WHEA_IPF_CPE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
class NT_TIB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionList = v_ptr32()
self.StackBase = v_ptr32()
self.StackLimit = v_ptr32()
self.SubSystemTib = v_ptr32()
self.FiberData = v_ptr32()
self.ArbitraryUserPointer = v_ptr32()
self.Self = v_ptr32()
class MI_SESSION_DRIVER_UNLOAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Function = v_ptr32()
class ARBITER_TEST_ALLOCATION_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ArbitrationList = v_ptr32()
self.AllocateFromCount = v_uint32()
self.AllocateFrom = v_ptr32()
class POWER_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemState = v_uint32()
class MI_CONTROL_AREA_WAIT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.WaitReason = v_uint32()
self.WaitResponse = v_uint32()
self.Gate = KGATE()
class UNICODE_STRING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self.Buffer = v_ptr32()
class CELL_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u = u()
class NONOPAQUE_OPLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IrpExclusiveOplock = v_ptr32()
self.FileObject = v_ptr32()
self.ExclusiveOplockOwner = v_ptr32()
self.ExclusiveOplockOwnerThread = v_ptr32()
self.WaiterPriority = v_uint8()
self._pad0014 = v_bytes(size=3)
self.IrpOplocksR = LIST_ENTRY()
self.IrpOplocksRH = LIST_ENTRY()
self.RHBreakQueue = LIST_ENTRY()
self.WaitingIrps = LIST_ENTRY()
self.DelayAckFileObjectQueue = LIST_ENTRY()
self.AtomicQueue = LIST_ENTRY()
self.DeleterParentKey = v_ptr32()
self.OplockState = v_uint32()
self.FastMutex = v_ptr32()
class HEAP_LIST_LOOKUP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExtendedLookup = v_ptr32()
self.ArraySize = v_uint32()
self.ExtraItem = v_uint32()
self.ItemCount = v_uint32()
self.OutOfRangeItems = v_uint32()
self.BaseIndex = v_uint32()
self.ListHead = v_ptr32()
self.ListsInUseUlong = v_ptr32()
self.ListHints = v_ptr32()
class CM_KEY_SECURITY_CACHE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cell = v_uint32()
self.ConvKey = v_uint32()
self.List = LIST_ENTRY()
self.DescriptorLength = v_uint32()
self.RealRefCount = v_uint32()
self.Descriptor = SECURITY_DESCRIPTOR_RELATIVE()
class _unnamed_35601(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags = v_uint32()
class DUMMY_FILE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ObjectHeader = OBJECT_HEADER()
self.FileObjectBody = vstruct.VArray([ v_uint8() for i in xrange(128) ])
class COMPRESSED_DATA_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CompressionFormatAndEngine = v_uint16()
self.CompressionUnitShift = v_uint8()
self.ChunkShift = v_uint8()
self.ClusterShift = v_uint8()
self.Reserved = v_uint8()
self.NumberOfChunks = v_uint16()
self.CompressedChunkSizes = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class SID_AND_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Sid = v_ptr32()
self.Attributes = v_uint32()
class CM_RM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RmListEntry = LIST_ENTRY()
self.TransactionListHead = LIST_ENTRY()
self.TmHandle = v_ptr32()
self.Tm = v_ptr32()
self.RmHandle = v_ptr32()
self.KtmRm = v_ptr32()
self.RefCount = v_uint32()
self.ContainerNum = v_uint32()
self.ContainerSize = v_uint64()
self.CmHive = v_ptr32()
self.LogFileObject = v_ptr32()
self.MarshallingContext = v_ptr32()
self.RmFlags = v_uint32()
self.LogStartStatus1 = v_uint32()
self.LogStartStatus2 = v_uint32()
self.BaseLsn = v_uint64()
self.RmLock = v_ptr32()
self._pad0058 = v_bytes(size=4)
class MI_USER_VA_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NumberOfCommittedPageTables = v_uint32()
self.PhysicalMappingCount = v_uint32()
self.VadBitMapHint = v_uint32()
self.LastAllocationSizeHint = v_uint32()
self.LastAllocationSize = v_uint32()
self.LowestBottomUpVadBit = v_uint32()
self.VadBitMapSize = v_uint32()
self.MaximumLastVadBit = v_uint32()
self.VadsBeingDeleted = v_uint32()
self.LastVadDeletionEvent = v_ptr32()
self.VadBitBuffer = v_ptr32()
self.LowestBottomUpAllocationAddress = v_ptr32()
self.HighestTopDownAllocationAddress = v_ptr32()
self.FreeTebHint = v_ptr32()
self.PrivateFixupVadCount = v_uint32()
self.UsedPageTableEntries = vstruct.VArray([ v_uint16() for i in xrange(1536) ])
self.CommittedPageTables = vstruct.VArray([ v_uint32() for i in xrange(48) ])
class COLORED_PAGE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BeingZeroed = v_uint32()
self.Processor = v_uint32()
self.PagesQueued = v_uint32()
self.PfnAllocation = v_ptr32()
class EPROCESS_QUOTA_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_27849(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.Reserved = v_uint16()
self.ShareAccess = v_uint16()
self.Parameters = v_ptr32()
class PHYSICAL_MEMORY_RUN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BasePage = v_uint32()
self.PageCount = v_uint32()
class FILE_SEGMENT_ELEMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Buffer = v_ptr64()
class _unnamed_28106(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint8()
class PENDING_RELATIONS_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.WorkItem = WORK_QUEUE_ITEM()
self.DeviceEvent = v_ptr32()
self.DeviceObject = v_ptr32()
self.RelationsList = v_ptr32()
self.EjectIrp = v_ptr32()
self.Lock = v_uint32()
self.Problem = v_uint32()
self.ProfileChangingEject = v_uint8()
self.DisplaySafeRemovalDialog = v_uint8()
self._pad0034 = v_bytes(size=2)
self.LightestSleepState = v_uint32()
self.DockInterface = v_ptr32()
class LDRP_DLL_SNAP_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class OBJECT_HEADER_NAME_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Directory = v_ptr32()
self.Name = UNICODE_STRING()
self.ReferenceCount = v_uint32()
class ACCESS_REASONS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = vstruct.VArray([ v_uint32() for i in xrange(32) ])
class CM_KCB_UOW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TransactionListEntry = LIST_ENTRY()
self.KCBLock = v_ptr32()
self.KeyLock = v_ptr32()
self.KCBListEntry = LIST_ENTRY()
self.KeyControlBlock = v_ptr32()
self.Transaction = v_ptr32()
self.UoWState = v_uint32()
self.ActionType = v_uint32()
self.StorageType = v_uint32()
self._pad0030 = v_bytes(size=4)
self.ChildKCB = v_ptr32()
self.NewValueCell = v_uint32()
class DBGKD_ANY_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.X86ControlSet = X86_DBGKD_CONTROL_SET()
self._pad001c = v_bytes(size=12)
class _unnamed_28115(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdType = v_uint32()
class MMSUPPORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkingSetMutex = EX_PUSH_LOCK()
self.ExitGate = v_ptr32()
self.AccessLog = v_ptr32()
self.WorkingSetExpansionLinks = LIST_ENTRY()
self.AgeDistribution = vstruct.VArray([ v_uint32() for i in xrange(7) ])
self.MinimumWorkingSetSize = v_uint32()
self.WorkingSetSize = v_uint32()
self.WorkingSetPrivateSize = v_uint32()
self.MaximumWorkingSetSize = v_uint32()
self.ChargedWslePages = v_uint32()
self.ActualWslePages = v_uint32()
self.WorkingSetSizeOverhead = v_uint32()
self.PeakWorkingSetSize = v_uint32()
self.HardFaultCount = v_uint32()
self.VmWorkingSetList = v_ptr32()
self.NextPageColor = v_uint16()
self.LastTrimStamp = v_uint16()
self.PageFaultCount = v_uint32()
self.TrimmedPageCount = v_uint32()
self.ForceTrimPages = v_uint32()
self.Flags = MMSUPPORT_FLAGS()
self.WsSwapSupport = v_ptr32()
class LOGGED_STREAM_CALLBACK_V2(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LogHandleContext = v_ptr32()
class POP_IRP_WORKER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Thread = v_ptr32()
self.Irp = v_ptr32()
self.Device = v_ptr32()
self.Static = v_uint8()
self._pad0018 = v_bytes(size=3)
class HBASE_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Sequence1 = v_uint32()
self.Sequence2 = v_uint32()
self.TimeStamp = LARGE_INTEGER()
self.Major = v_uint32()
self.Minor = v_uint32()
self.Type = v_uint32()
self.Format = v_uint32()
self.RootCell = v_uint32()
self.Length = v_uint32()
self.Cluster = v_uint32()
self.FileName = vstruct.VArray([ v_uint8() for i in xrange(64) ])
self.RmId = GUID()
self.LogId = GUID()
self.Flags = v_uint32()
self.TmId = GUID()
self.GuidSignature = v_uint32()
self.LastReorganizeTime = v_uint64()
self.Reserved1 = vstruct.VArray([ v_uint32() for i in xrange(83) ])
self.CheckSum = v_uint32()
self.Reserved2 = vstruct.VArray([ v_uint32() for i in xrange(882) ])
self.ThawTmId = GUID()
self.ThawRmId = GUID()
self.ThawLogId = GUID()
self.BootType = v_uint32()
self.BootRecover = v_uint32()
class _unnamed_30882(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30991()
class BUS_EXTENSION_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.BusExtension = v_ptr32()
class _unnamed_30748(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30750()
class _unnamed_30749(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s2 = _unnamed_30755()
class MMVAD_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VadType = v_uint32()
class DBGKD_GET_SET_BUS_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BusDataType = v_uint32()
self.BusNumber = v_uint32()
self.SlotNumber = v_uint32()
self.Offset = v_uint32()
self.Length = v_uint32()
class _unnamed_28101(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WhichSpace = v_uint32()
self.Buffer = v_ptr32()
self.Offset = v_uint32()
self.Length = v_uint32()
class KDPC(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.Importance = v_uint8()
self.Number = v_uint16()
self.DpcListEntry = LIST_ENTRY()
self.DeferredRoutine = v_ptr32()
self.DeferredContext = v_ptr32()
self.SystemArgument1 = v_ptr32()
self.SystemArgument2 = v_ptr32()
self.DpcData = v_ptr32()
class EVENT_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.HeaderType = v_uint16()
self.Flags = v_uint16()
self.EventProperty = v_uint16()
self.ThreadId = v_uint32()
self.ProcessId = v_uint32()
self.TimeStamp = LARGE_INTEGER()
self.ProviderId = GUID()
self.EventDescriptor = EVENT_DESCRIPTOR()
self.KernelTime = v_uint32()
self.UserTime = v_uint32()
self.ActivityId = GUID()
class HAL_PMC_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class KEVENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
class KRESOURCEMANAGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationAvailable = KEVENT()
self.cookie = v_uint32()
self.State = v_uint32()
self.Flags = v_uint32()
self.Mutex = KMUTANT()
self.NamespaceLink = KTMOBJECT_NAMESPACE_LINK()
self.RmId = GUID()
self.NotificationQueue = KQUEUE()
self.NotificationMutex = KMUTANT()
self.EnlistmentHead = LIST_ENTRY()
self.EnlistmentCount = v_uint32()
self.NotificationRoutine = v_ptr32()
self.Key = v_ptr32()
self.ProtocolListHead = LIST_ENTRY()
self.PendingPropReqListHead = LIST_ENTRY()
self.CRMListEntry = LIST_ENTRY()
self.Tm = v_ptr32()
self.Description = UNICODE_STRING()
self.Enlistments = KTMOBJECT_NAMESPACE()
self.CompletionBinding = KRESOURCEMANAGER_COMPLETION_BINDING()
class CM_NAME_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConvKey = v_uint32()
self.NextHash = v_ptr32()
self.NameLength = v_uint16()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class MM_PAGE_ACCESS_INFO_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = SINGLE_LIST_ENTRY()
self.Type = v_uint32()
self.EmptySequenceNumber = v_uint32()
self._pad0010 = v_bytes(size=4)
self.CreateTime = v_uint64()
self.EmptyTime = v_uint64()
self.PageEntry = v_ptr32()
self.FileEntry = v_ptr32()
self.FirstFileEntry = v_ptr32()
self.Process = v_ptr32()
self.SessionId = v_uint32()
self._pad0038 = v_bytes(size=4)
class IMAGE_DEBUG_DIRECTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Characteristics = v_uint32()
self.TimeDateStamp = v_uint32()
self.MajorVersion = v_uint16()
self.MinorVersion = v_uint16()
self.Type = v_uint32()
self.SizeOfData = v_uint32()
self.AddressOfRawData = v_uint32()
self.PointerToRawData = v_uint32()
class MCGEN_TRACE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RegistrationHandle = v_uint64()
self.Logger = v_uint64()
self.MatchAnyKeyword = v_uint64()
self.MatchAllKeyword = v_uint64()
self.Flags = v_uint32()
self.IsEnabled = v_uint32()
self.Level = v_uint8()
self.Reserve = v_uint8()
self.EnableBitsCount = v_uint16()
self.EnableBitMask = v_ptr32()
self.EnableKeyWords = v_ptr32()
self.EnableLevel = v_ptr32()
class _unnamed_27733(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsynchronousParameters = _unnamed_27748()
class CM_INTENT_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OwnerCount = v_uint32()
self.OwnerTable = v_ptr32()
class KWAIT_STATUS_REGISTER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint8()
class _unnamed_34980(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.bits = _unnamed_36838()
class LAZY_WRITER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ScanDpc = KDPC()
self.ScanTimer = KTIMER()
self.ScanActive = v_uint8()
self.OtherWork = v_uint8()
self.PendingTeardownScan = v_uint8()
self.PendingPeriodicScan = v_uint8()
self.PendingLowMemoryScan = v_uint8()
self.PendingPowerScan = v_uint8()
self.PendingCoalescingFlushScan = v_uint8()
self._pad0050 = v_bytes(size=1)
class KALPC_SECURITY_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HandleTable = v_ptr32()
self.ContextHandle = v_ptr32()
self.OwningProcess = v_ptr32()
self.OwnerPort = v_ptr32()
self.DynamicSecurity = SECURITY_CLIENT_CONTEXT()
self.u1 = _unnamed_31167()
class RELATION_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.MaxCount = v_uint32()
self.Devices = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class DBGKD_SET_INTERNAL_BREAKPOINT32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakpointAddress = v_uint32()
self.Flags = v_uint32()
class THERMAL_INFORMATION_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ThermalStamp = v_uint32()
self.ThermalConstant1 = v_uint32()
self.ThermalConstant2 = v_uint32()
self.SamplingPeriod = v_uint32()
self.CurrentTemperature = v_uint32()
self.PassiveTripPoint = v_uint32()
self.CriticalTripPoint = v_uint32()
self.ActiveTripPointCount = v_uint8()
self._pad0020 = v_bytes(size=3)
self.ActiveTripPoint = vstruct.VArray([ v_uint32() for i in xrange(10) ])
self.S4TransitionTripPoint = v_uint32()
class POP_THERMAL_ZONE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.State = v_uint8()
self.Flags = v_uint8()
self.Mode = v_uint8()
self.PendingMode = v_uint8()
self.ActivePoint = v_uint8()
self.PendingActivePoint = v_uint8()
self._pad0010 = v_bytes(size=2)
self.HighPrecisionThrottle = v_uint32()
self.Throttle = v_uint32()
self.PendingThrottle = v_uint32()
self._pad0020 = v_bytes(size=4)
self.ThrottleStartTime = v_uint64()
self.LastTime = v_uint64()
self.SampleRate = v_uint32()
self.LastTemp = v_uint32()
self.PassiveTimer = KTIMER()
self.PassiveDpc = KDPC()
self.OverThrottled = POP_ACTION_TRIGGER()
self.Irp = v_ptr32()
self.Info = THERMAL_INFORMATION_EX()
self.InfoLastUpdateTime = LARGE_INTEGER()
self.Metrics = POP_THERMAL_ZONE_METRICS()
class POOL_HACKER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = POOL_HEADER()
self.Contents = vstruct.VArray([ v_uint32() for i in xrange(8) ])
class IO_REMOVE_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Common = IO_REMOVE_LOCK_COMMON_BLOCK()
class HANDLE_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextHandleNeedingPool = v_uint32()
self.ExtraInfoPages = v_uint32()
self.TableCode = v_uint32()
self.QuotaProcess = v_ptr32()
self.HandleTableList = LIST_ENTRY()
self.UniqueProcessId = v_uint32()
self.Flags = v_uint32()
self.HandleContentionEvent = EX_PUSH_LOCK()
self.HandleTableLock = EX_PUSH_LOCK()
self.FreeLists = vstruct.VArray([ HANDLE_TABLE_FREE_LIST() for i in xrange(1) ])
class PO_HIBER_PERF(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HiberIoTicks = v_uint64()
self.HiberIoCpuTicks = v_uint64()
self.HiberInitTicks = v_uint64()
self.HiberHiberFileTicks = v_uint64()
self.HiberCompressTicks = v_uint64()
self.HiberSharedBufferTicks = v_uint64()
self.TotalHibernateTime = LARGE_INTEGER()
self.POSTTime = v_uint32()
self.ResumeBootMgrTime = v_uint32()
self.BootmgrUserInputTime = v_uint32()
self._pad0048 = v_bytes(size=4)
self.ResumeAppTicks = v_uint64()
self.ResumeAppStartTimestamp = v_uint64()
self.ResumeLibraryInitTicks = v_uint64()
self.ResumeInitTicks = v_uint64()
self.ResumeRestoreImageStartTimestamp = v_uint64()
self.ResumeHiberFileTicks = v_uint64()
self.ResumeIoTicks = v_uint64()
self.ResumeDecompressTicks = v_uint64()
self.ResumeAllocateTicks = v_uint64()
self.ResumeUserInOutTicks = v_uint64()
self.ResumeMapTicks = v_uint64()
self.ResumeUnmapTicks = v_uint64()
self.ResumeKernelSwitchTimestamp = v_uint64()
self.WriteLogDataTimestamp = v_uint64()
self.KernelReturnFromHandler = v_uint64()
self.TimeStampCounterAtSwitchTime = v_uint64()
self.HalTscOffset = v_uint64()
self.HvlTscOffset = v_uint64()
self.SleeperThreadEnd = v_uint64()
self.KernelReturnSystemPowerStateTimestamp = v_uint64()
self.IoBoundedness = v_uint64()
self.KernelDecompressTicks = v_uint64()
self.KernelIoTicks = v_uint64()
self.KernelCopyTicks = v_uint64()
self.ReadCheckCount = v_uint64()
self.KernelInitTicks = v_uint64()
self.KernelResumeHiberFileTicks = v_uint64()
self.KernelIoCpuTicks = v_uint64()
self.KernelSharedBufferTicks = v_uint64()
self.KernelAnimationTicks = v_uint64()
self.AnimationStart = LARGE_INTEGER()
self.AnimationStop = LARGE_INTEGER()
self.DeviceResumeTime = v_uint32()
self._pad0150 = v_bytes(size=4)
self.BootPagesProcessed = v_uint64()
self.KernelPagesProcessed = v_uint64()
self.BootBytesWritten = v_uint64()
self.KernelBytesWritten = v_uint64()
self.BootPagesWritten = v_uint64()
self.KernelPagesWritten = v_uint64()
self.BytesWritten = v_uint64()
self.PagesWritten = v_uint32()
self.FileRuns = v_uint32()
self.NoMultiStageResumeReason = v_uint32()
self.MaxHuffRatio = v_uint32()
self.AdjustedTotalResumeTime = v_uint64()
self.ResumeCompleteTimestamp = v_uint64()
class DEFERRED_WRITE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeByteSize = v_uint16()
self.FileObject = v_ptr32()
self.BytesToWrite = v_uint32()
self.DeferredWriteLinks = LIST_ENTRY()
self.Event = v_ptr32()
self.PostRoutine = v_ptr32()
self.Context1 = v_ptr32()
self.Context2 = v_ptr32()
class _unnamed_30113(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReadMemory = DBGKD_READ_MEMORY64()
self._pad0028 = v_bytes(size=24)
class _unnamed_34661(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint16()
self.Group = v_uint16()
self.Vector = v_uint32()
self.Affinity = v_uint32()
class ARBITER_INSTANCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.MutexEvent = v_ptr32()
self.Name = v_ptr32()
self.OrderingName = v_ptr32()
self.ResourceType = v_uint32()
self.Allocation = v_ptr32()
self.PossibleAllocation = v_ptr32()
self.OrderingList = ARBITER_ORDERING_LIST()
self.ReservedList = ARBITER_ORDERING_LIST()
self.ReferenceCount = v_uint32()
self.Interface = v_ptr32()
self.AllocationStackMaxSize = v_uint32()
self.AllocationStack = v_ptr32()
self.UnpackRequirement = v_ptr32()
self.PackResource = v_ptr32()
self.UnpackResource = v_ptr32()
self.ScoreRequirement = v_ptr32()
self.TestAllocation = v_ptr32()
self.RetestAllocation = v_ptr32()
self.CommitAllocation = v_ptr32()
self.RollbackAllocation = v_ptr32()
self.BootAllocation = v_ptr32()
self.QueryArbitrate = v_ptr32()
self.QueryConflict = v_ptr32()
self.AddReserved = v_ptr32()
self.StartArbiter = v_ptr32()
self.PreprocessEntry = v_ptr32()
self.AllocateEntry = v_ptr32()
self.GetNextAllocationRange = v_ptr32()
self.FindSuitableRange = v_ptr32()
self.AddAllocation = v_ptr32()
self.BacktrackAllocation = v_ptr32()
self.OverrideConflict = v_ptr32()
self.InitializeRangeList = v_ptr32()
self.TransactionInProgress = v_uint8()
self._pad0094 = v_bytes(size=3)
self.TransactionEvent = v_ptr32()
self.Extension = v_ptr32()
self.BusDeviceObject = v_ptr32()
self.ConflictCallbackContext = v_ptr32()
self.ConflictCallback = v_ptr32()
self.PdoDescriptionString = vstruct.VArray([ v_uint16() for i in xrange(336) ])
self.PdoSymbolicNameString = vstruct.VArray([ v_uint8() for i in xrange(672) ])
self.PdoAddressString = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad05ec = v_bytes(size=2)
class NAMED_PIPE_CREATE_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NamedPipeType = v_uint32()
self.ReadMode = v_uint32()
self.CompletionMode = v_uint32()
self.MaximumInstances = v_uint32()
self.InboundQuota = v_uint32()
self.OutboundQuota = v_uint32()
self.DefaultTimeout = LARGE_INTEGER()
self.TimeoutSpecified = v_uint8()
self._pad0028 = v_bytes(size=7)
class _unnamed_28021(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.StartSid = v_ptr32()
self.SidList = v_ptr32()
self.SidListLength = v_uint32()
class MMSUPPORT_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkingSetType = v_uint8()
self.SessionMaster = v_uint8()
self.MemoryPriority = v_uint8()
self.WsleDeleted = v_uint8()
class PROC_PERF_DOMAIN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Master = v_ptr32()
self.Members = KAFFINITY_EX()
self.ProcessorCount = v_uint32()
self.Processors = v_ptr32()
self.GetFFHThrottleState = v_ptr32()
self.BoostPolicyHandler = v_ptr32()
self.BoostModeHandler = v_ptr32()
self.PerfSelectionHandler = v_ptr32()
self.PerfControlHandler = v_ptr32()
self.MaxFrequency = v_uint32()
self.NominalFrequency = v_uint32()
self.MaxPercent = v_uint32()
self.MinPerfPercent = v_uint32()
self.MinThrottlePercent = v_uint32()
self.Coordination = v_uint8()
self.HardPlatformCap = v_uint8()
self.AffinitizeControl = v_uint8()
self._pad004c = v_bytes(size=1)
self.SelectedPercent = v_uint32()
self.SelectedFrequency = v_uint32()
self.DesiredPercent = v_uint32()
self.MaxPolicyPercent = v_uint32()
self.MinPolicyPercent = v_uint32()
self.ConstrainedMaxPercent = v_uint32()
self.ConstrainedMinPercent = v_uint32()
self.GuaranteedPercent = v_uint32()
self.TolerancePercent = v_uint32()
self.SelectedState = v_uint64()
self.Force = v_uint8()
self._pad0080 = v_bytes(size=7)
self.PerfChangeTime = v_uint64()
self.PerfChangeIntervalCount = v_uint32()
self._pad0090 = v_bytes(size=4)
class EXCEPTION_REGISTRATION_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Handler = v_ptr32()
class JOB_CPU_RATE_CONTROL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class FILE_BASIC_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreationTime = LARGE_INTEGER()
self.LastAccessTime = LARGE_INTEGER()
self.LastWriteTime = LARGE_INTEGER()
self.ChangeTime = LARGE_INTEGER()
self.FileAttributes = v_uint32()
self._pad0028 = v_bytes(size=4)
class PLUGPLAY_EVENT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EventGuid = GUID()
self.EventCategory = v_uint32()
self.Result = v_ptr32()
self.Flags = v_uint32()
self.TotalSize = v_uint32()
self.DeviceObject = v_ptr32()
self.u = _unnamed_33210()
class LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_ptr32()
self.Blink = v_ptr32()
class M128A(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Low = v_uint64()
self.High = v_uint64()
class WHEA_NOTIFICATION_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.Length = v_uint8()
self.Flags = WHEA_NOTIFICATION_FLAGS()
self.u = _unnamed_34035()
class CM_KEY_SECURITY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Reserved = v_uint16()
self.Flink = v_uint32()
self.Blink = v_uint32()
self.ReferenceCount = v_uint32()
self.DescriptorLength = v_uint32()
self.Descriptor = SECURITY_DESCRIPTOR_RELATIVE()
class PNP_DEVICE_COMPLETION_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DispatchedList = LIST_ENTRY()
self.DispatchedCount = v_uint32()
self.CompletedList = LIST_ENTRY()
self.CompletedSemaphore = KSEMAPHORE()
self.SpinLock = v_uint32()
class CLIENT_ID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UniqueProcess = v_ptr32()
self.UniqueThread = v_ptr32()
class POP_ACTION_TRIGGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
self.Flags = v_uint32()
self.Wait = v_ptr32()
self.Battery = _unnamed_34192()
class ETW_REALTIME_CONSUMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Links = LIST_ENTRY()
self.ProcessHandle = v_ptr32()
self.ProcessObject = v_ptr32()
self.NextNotDelivered = v_ptr32()
self.RealtimeConnectContext = v_ptr32()
self.DisconnectEvent = v_ptr32()
self.DataAvailableEvent = v_ptr32()
self.UserBufferCount = v_ptr32()
self.UserBufferListHead = v_ptr32()
self.BuffersLost = v_uint32()
self.EmptyBuffersCount = v_uint32()
self.LoggerId = v_uint16()
self.Flags = v_uint8()
self._pad0034 = v_bytes(size=1)
self.ReservedBufferSpaceBitMap = RTL_BITMAP()
self.ReservedBufferSpace = v_ptr32()
self.ReservedBufferSpaceSize = v_uint32()
self.UserPagesAllocated = v_uint32()
self.UserPagesReused = v_uint32()
class WHEA_ERROR_SOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Version = v_uint32()
self.Type = v_uint32()
self.State = v_uint32()
self.MaxRawDataLength = v_uint32()
self.NumRecordsToPreallocate = v_uint32()
self.MaxSectionsPerRecord = v_uint32()
self.ErrorSourceId = v_uint32()
self.PlatformErrorSourceId = v_uint32()
self.Flags = v_uint32()
self.Info = _unnamed_32010()
class MI_EXTRA_IMAGE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfHeaders = v_uint32()
self.SizeOfImage = v_uint32()
class DEVICE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DosDevicesDirectory = v_ptr32()
self.GlobalDosDevicesDirectory = v_ptr32()
self.DosDevicesDirectoryHandle = v_ptr32()
self.ReferenceCount = v_uint32()
self.DriveMap = v_uint32()
self.DriveType = vstruct.VArray([ v_uint8() for i in xrange(32) ])
class DBGKD_SET_INTERNAL_BREAKPOINT64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakpointAddress = v_uint64()
self.Flags = v_uint32()
self._pad0010 = v_bytes(size=4)
class _unnamed_27748(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserApcRoutine = v_ptr32()
self.UserApcContext = v_ptr32()
class _unnamed_30805(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30806()
class VI_TRACK_IRQL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Thread = v_ptr32()
self.OldIrql = v_uint8()
self.NewIrql = v_uint8()
self.Processor = v_uint16()
self.TickCount = v_uint32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(5) ])
class GUID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data1 = v_uint32()
self.Data2 = v_uint16()
self.Data3 = v_uint16()
self.Data4 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
class HEAP_UCR_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.SegmentEntry = LIST_ENTRY()
self.Address = v_ptr32()
self.Size = v_uint32()
class _unnamed_30413(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileOffset = LARGE_INTEGER()
class KSTACK_COUNT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_uint32()
class POP_SYSTEM_IDLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AverageIdleness = v_uint32()
self.LowestIdleness = v_uint32()
self.Time = v_uint32()
self.Timeout = v_uint32()
self.LastUserInput = v_uint32()
self.Action = POWER_ACTION_POLICY()
self.MinState = v_uint32()
self.SystemRequired = v_uint32()
self.IdleWorker = v_uint8()
self.Sampling = v_uint8()
self._pad0030 = v_bytes(size=6)
self.LastTick = v_uint64()
self.LastSystemRequiredTime = v_uint32()
self._pad0040 = v_bytes(size=4)
class KAPC_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ApcListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(2) ])
self.Process = v_ptr32()
self.KernelApcInProgress = v_uint8()
self.KernelApcPending = v_uint8()
self.UserApcPending = v_uint8()
self._pad0018 = v_bytes(size=1)
class COUNTER_READING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
self.Index = v_uint32()
self.Start = v_uint64()
self.Total = v_uint64()
class MMVAD_SHORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VadNode = MM_AVL_NODE()
self.StartingVpn = v_uint32()
self.EndingVpn = v_uint32()
self.PushLock = EX_PUSH_LOCK()
self.u = _unnamed_35044()
self.u1 = _unnamed_35045()
self.EventList = v_ptr32()
self.ReferenceCount = v_uint32()
class DBGKD_GET_VERSION32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorVersion = v_uint16()
self.MinorVersion = v_uint16()
self.ProtocolVersion = v_uint16()
self.Flags = v_uint16()
self.KernBase = v_uint32()
self.PsLoadedModuleList = v_uint32()
self.MachineType = v_uint16()
self.ThCallbackStack = v_uint16()
self.NextCallback = v_uint16()
self.FramePointer = v_uint16()
self.KiCallUserMode = v_uint32()
self.KeUserCallbackDispatcher = v_uint32()
self.BreakpointWithStatus = v_uint32()
self.DebuggerDataList = v_uint32()
class MI_PHYSMEM_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IoTracker = v_ptr32()
class RTL_AVL_TREE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Root = v_ptr32()
class CM_CELL_REMAP_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OldCell = v_uint32()
self.NewCell = v_uint32()
class PEBS_DS_SAVE_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BtsBufferBase = v_uint64()
self.BtsIndex = v_uint64()
self.BtsAbsoluteMaximum = v_uint64()
self.BtsInterruptThreshold = v_uint64()
self.PebsBufferBase = v_uint64()
self.PebsIndex = v_uint64()
self.PebsAbsoluteMaximum = v_uint64()
self.PebsInterruptThreshold = v_uint64()
self.PebsCounterReset0 = v_uint64()
self.PebsCounterReset1 = v_uint64()
self.PebsCounterReset2 = v_uint64()
self.PebsCounterReset3 = v_uint64()
class KDPC_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DpcListHead = LIST_ENTRY()
self.DpcLock = v_uint32()
self.DpcQueueDepth = v_uint32()
self.DpcCount = v_uint32()
class KIDTENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint16()
self.Selector = v_uint16()
self.Access = v_uint16()
self.ExtendedOffset = v_uint16()
class _unnamed_27940(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.CompletionFilter = v_uint32()
class _unnamed_27943(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileInformationClass = v_uint32()
class _unnamed_27946(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileInformationClass = v_uint32()
self.FileObject = v_ptr32()
self.ReplaceIfExists = v_uint8()
self.AdvanceOnly = v_uint8()
self._pad0010 = v_bytes(size=2)
class XSAVE_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LegacyState = XSAVE_FORMAT()
self.Header = XSAVE_AREA_HEADER()
class MMINPAGE_SUPPORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.Thread = v_ptr32()
self.ListHead = LIST_ENTRY()
self._pad0018 = v_bytes(size=4)
self.Event = KEVENT()
self.CollidedEvent = KEVENT()
self.IoStatus = IO_STATUS_BLOCK()
self.ReadOffset = LARGE_INTEGER()
self.PteContents = MMPTE()
self.LockedProtoPfn = v_ptr32()
self.WaitCount = v_uint32()
self.ByteCount = v_uint32()
self.u3 = _unnamed_37086()
self.u1 = _unnamed_37087()
self.FilePointer = v_ptr32()
self.ControlArea = v_ptr32()
self.FaultingAddress = v_ptr32()
self.PointerPte = v_ptr32()
self.BasePte = v_ptr32()
self.Pfn = v_ptr32()
self.PrefetchMdl = v_ptr32()
self.Mdl = MDL()
self.Page = vstruct.VArray([ v_uint32() for i in xrange(16) ])
self._pad00e0 = v_bytes(size=4)
class SYSTEM_POWER_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint32()
self.PowerButton = POWER_ACTION_POLICY()
self.SleepButton = POWER_ACTION_POLICY()
self.LidClose = POWER_ACTION_POLICY()
self.LidOpenWake = v_uint32()
self.Reserved = v_uint32()
self.Idle = POWER_ACTION_POLICY()
self.IdleTimeout = v_uint32()
self.IdleSensitivity = v_uint8()
self.DynamicThrottle = v_uint8()
self.Spare2 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.MinSleep = v_uint32()
self.MaxSleep = v_uint32()
self.ReducedLatencySleep = v_uint32()
self.WinLogonFlags = v_uint32()
self.Spare3 = v_uint32()
self.DozeS4Timeout = v_uint32()
self.BroadcastCapacityResolution = v_uint32()
self.DischargePolicy = vstruct.VArray([ SYSTEM_POWER_LEVEL() for i in xrange(4) ])
self.VideoTimeout = v_uint32()
self.VideoDimDisplay = v_uint8()
self._pad00c8 = v_bytes(size=3)
self.VideoReserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
self.SpindownTimeout = v_uint32()
self.OptimizeForPower = v_uint8()
self.FanThrottleTolerance = v_uint8()
self.ForcedThrottle = v_uint8()
self.MinThrottle = v_uint8()
self.OverThrottled = POWER_ACTION_POLICY()
class KRESOURCEMANAGER_COMPLETION_BINDING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationListHead = LIST_ENTRY()
self.Port = v_ptr32()
self.Key = v_uint32()
self.BindingProcess = v_ptr32()
class WHEA_XPF_MC_BANK_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BankNumber = v_uint8()
self.ClearOnInitialization = v_uint8()
self.StatusDataFormat = v_uint8()
self.Flags = XPF_MC_BANK_FLAGS()
self.ControlMsr = v_uint32()
self.StatusMsr = v_uint32()
self.AddressMsr = v_uint32()
self.MiscMsr = v_uint32()
self.ControlData = v_uint64()
class KTHREAD_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitReasonBitMap = v_uint64()
self.UserData = v_ptr32()
self.Flags = v_uint32()
self.ContextSwitches = v_uint32()
self._pad0018 = v_bytes(size=4)
self.CycleTimeBias = v_uint64()
self.HardwareCounters = v_uint64()
self.HwCounter = vstruct.VArray([ COUNTER_READING() for i in xrange(16) ])
class MMADDRESS_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_37062()
self.EndVa = v_ptr32()
class OBJECT_REF_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
class KALPC_RESERVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OwnerPort = v_ptr32()
self.HandleTable = v_ptr32()
self.Handle = v_ptr32()
self.Message = v_ptr32()
self.Active = v_uint32()
class KINTERRUPT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.InterruptListEntry = LIST_ENTRY()
self.ServiceRoutine = v_ptr32()
self.MessageServiceRoutine = v_ptr32()
self.MessageIndex = v_uint32()
self.ServiceContext = v_ptr32()
self.SpinLock = v_uint32()
self.TickCount = v_uint32()
self.ActualLock = v_ptr32()
self.DispatchAddress = v_ptr32()
self.Vector = v_uint32()
self.Irql = v_uint8()
self.SynchronizeIrql = v_uint8()
self.FloatingSave = v_uint8()
self.Connected = v_uint8()
self.Number = v_uint32()
self.ShareVector = v_uint8()
self._pad003a = v_bytes(size=1)
self.ActiveCount = v_uint16()
self.InternalState = v_uint32()
self.Mode = v_uint32()
self.Polarity = v_uint32()
self.ServiceCount = v_uint32()
self.DispatchCount = v_uint32()
self.PassiveEvent = v_ptr32()
self.DispatchCode = vstruct.VArray([ v_uint32() for i in xrange(145) ])
self.DisconnectData = v_ptr32()
self.ServiceThread = v_ptr32()
class _unnamed_33989(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Port = _unnamed_34088()
class SECURITY_DESCRIPTOR_RELATIVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint8()
self.Sbz1 = v_uint8()
self.Control = v_uint16()
self.Owner = v_uint32()
self.Group = v_uint32()
self.Sacl = v_uint32()
self.Dacl = v_uint32()
class DUMP_INITIALIZATION_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Reserved = v_uint32()
self.MemoryBlock = v_ptr32()
self.CommonBuffer = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self._pad0018 = v_bytes(size=4)
self.PhysicalAddress = vstruct.VArray([ LARGE_INTEGER() for i in xrange(2) ])
self.StallRoutine = v_ptr32()
self.OpenRoutine = v_ptr32()
self.WriteRoutine = v_ptr32()
self.FinishRoutine = v_ptr32()
self.AdapterObject = v_ptr32()
self.MappedRegisterBase = v_ptr32()
self.PortConfiguration = v_ptr32()
self.CrashDump = v_uint8()
self.MarkMemoryOnly = v_uint8()
self.HiberResume = v_uint8()
self.Reserved1 = v_uint8()
self.MaximumTransferSize = v_uint32()
self.CommonBufferSize = v_uint32()
self.TargetAddress = v_ptr32()
self.WritePendingRoutine = v_ptr32()
self.PartitionStyle = v_uint32()
self.DiskInfo = _unnamed_37043()
self.ReadRoutine = v_ptr32()
self.GetDriveTelemetryRoutine = v_ptr32()
self.LogSectionTruncateSize = v_uint32()
self.Parameters = vstruct.VArray([ v_uint32() for i in xrange(16) ])
self.GetTransferSizesRoutine = v_ptr32()
self.DumpNotifyRoutine = v_ptr32()
class AER_ENDPOINT_DESCRIPTOR_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UncorrectableErrorMaskRW = v_uint16()
class VERIFIER_SHARED_EXPORT_THUNK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class FILE_GET_QUOTA_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextEntryOffset = v_uint32()
self.SidLength = v_uint32()
self.Sid = SID()
class _unnamed_34368(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Balance = v_uint32()
class OBJECT_HANDLE_COUNT_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Process = v_ptr32()
self.HandleCount = v_uint32()
class MI_REVERSE_VIEW_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ViewLinks = LIST_ENTRY()
self.SystemCacheVa = v_ptr32()
self.Subsection = v_ptr32()
self.SectionOffset = v_uint64()
class IRP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.MdlAddress = v_ptr32()
self.Flags = v_uint32()
self.AssociatedIrp = _unnamed_27730()
self.ThreadListEntry = LIST_ENTRY()
self.IoStatus = IO_STATUS_BLOCK()
self.RequestorMode = v_uint8()
self.PendingReturned = v_uint8()
self.StackCount = v_uint8()
self.CurrentLocation = v_uint8()
self.Cancel = v_uint8()
self.CancelIrql = v_uint8()
self.ApcEnvironment = v_uint8()
self.AllocationFlags = v_uint8()
self.UserIosb = v_ptr32()
self.UserEvent = v_ptr32()
self.Overlay = _unnamed_27733()
self.CancelRoutine = v_ptr32()
self.UserBuffer = v_ptr32()
self.Tail = _unnamed_27736()
class VF_KE_CRITICAL_REGION_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Thread = v_ptr32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(7) ])
class KGATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
class IO_COMPLETION_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Port = v_ptr32()
self.Key = v_ptr32()
class DRIVER_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DriverObject = v_ptr32()
self.AddDevice = v_ptr32()
self.Count = v_uint32()
self.ServiceKeyName = UNICODE_STRING()
self.ClientDriverExtension = v_ptr32()
self.FsFilterCallbacks = v_ptr32()
self.KseCallbacks = v_ptr32()
self.DvCallbacks = v_ptr32()
class RTL_CRITICAL_SECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DebugInfo = v_ptr32()
self.LockCount = v_uint32()
self.RecursionCount = v_uint32()
self.OwningThread = v_ptr32()
self.LockSemaphore = v_ptr32()
self.SpinCount = v_uint32()
class PLATFORM_IDLE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ResetCount = v_uint32()
self.StateCount = v_uint32()
self.TimeUnit = v_uint32()
self._pad0010 = v_bytes(size=4)
self.StartTime = v_uint64()
self.State = vstruct.VArray([ PLATFORM_IDLE_STATE_ACCOUNTING() for i in xrange(1) ])
class MMPFN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_28805()
self.u2 = _unnamed_28806()
self.PteAddress = v_ptr32()
self.u3 = _unnamed_28808()
self.OriginalPte = MMPTE()
self.u4 = _unnamed_28809()
class PO_IRP_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CurrentIrp = v_ptr32()
self.PendingIrpList = v_ptr32()
class HIVE_LOAD_FAILURE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Hive = v_ptr32()
self.Index = v_uint32()
self.RecoverableIndex = v_uint32()
self.Locations = vstruct.VArray([ _unnamed_29146() for i in xrange(8) ])
self.RecoverableLocations = vstruct.VArray([ _unnamed_29146() for i in xrange(8) ])
self.RegistryIO = _unnamed_29147()
self.CheckRegistry2 = _unnamed_29148()
self.CheckKey = _unnamed_29149()
self.CheckValueList = _unnamed_29150()
self.CheckHive = _unnamed_29151()
self.CheckHive1 = _unnamed_29151()
self.CheckBin = _unnamed_29152()
self.RecoverData = _unnamed_29153()
class flags(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Removable = v_uint8()
class _unnamed_31167(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_31169()
class _unnamed_31169(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revoked = v_uint32()
class DBGKD_SEARCH_MEMORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SearchAddress = v_uint64()
self.SearchLength = v_uint64()
self.PatternLength = v_uint32()
self._pad0018 = v_bytes(size=4)
class MI_VAD_SEQUENTIAL_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
class _unnamed_34678(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Channel = v_uint32()
self.RequestLine = v_uint32()
self.TransferWidth = v_uint8()
self.Reserved1 = v_uint8()
self.Reserved2 = v_uint8()
self.Reserved3 = v_uint8()
class POP_FX_IDLE_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TransitionLatency = v_uint64()
self.ResidencyRequirement = v_uint64()
self.NominalPower = v_uint32()
self._pad0018 = v_bytes(size=4)
class ALPC_COMPLETION_PACKET_LOOKASIDE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self.Size = v_uint32()
self.ActiveCount = v_uint32()
self.PendingNullCount = v_uint32()
self.PendingCheckCompletionListCount = v_uint32()
self.PendingDelete = v_uint32()
self.FreeListHead = SINGLE_LIST_ENTRY()
self.CompletionPort = v_ptr32()
self.CompletionKey = v_ptr32()
self.Entry = vstruct.VArray([ ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY() for i in xrange(1) ])
class WHEA_PERSISTENCE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint64()
class ETW_LAST_ENABLE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnableFlags = LARGE_INTEGER()
self.LoggerId = v_uint16()
self.Level = v_uint8()
self.Enabled = v_uint8()
self._pad0010 = v_bytes(size=4)
class HEAP_VIRTUAL_ALLOC_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = LIST_ENTRY()
self.ExtraStuff = HEAP_ENTRY_EXTRA()
self.CommitSize = v_uint32()
self.ReserveSize = v_uint32()
self.BusyBlock = HEAP_ENTRY()
class VI_DEADLOCK_THREAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Thread = v_ptr32()
self.CurrentSpinNode = v_ptr32()
self.CurrentOtherNode = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.NodeCount = v_uint32()
self.PagingCount = v_uint32()
self.ThreadUsesEresources = v_uint8()
self._pad0020 = v_bytes(size=3)
class _unnamed_34671(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Raw = _unnamed_34666()
class VF_SUSPECT_DRIVER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Links = LIST_ENTRY()
self.Loads = v_uint32()
self.Unloads = v_uint32()
self.BaseName = UNICODE_STRING()
class _unnamed_34674(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Channel = v_uint32()
self.Port = v_uint32()
self.Reserved1 = v_uint32()
class _unnamed_34574(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProviderPdo = v_ptr32()
class _unnamed_25488(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFunction = v_uint32()
class ARBITER_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Parameters = _unnamed_34931()
class EXCEPTION_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_ptr32()
self.ExceptionAddress = v_ptr32()
self.NumberParameters = v_uint32()
self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])
class X86_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TraceFlag = v_uint32()
self.Dr7 = v_uint32()
self.CurrentSymbolStart = v_uint32()
self.CurrentSymbolEnd = v_uint32()
class POP_CURRENT_BROADCAST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InProgress = v_uint8()
self._pad0004 = v_bytes(size=3)
self.SystemContext = SYSTEM_POWER_STATE_CONTEXT()
self.PowerAction = v_uint32()
self.DeviceState = v_ptr32()
class MMPTE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u = _unnamed_28657()
class _unnamed_28098(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IoResourceRequirementList = v_ptr32()
class VI_DEADLOCK_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Parent = v_ptr32()
self.ChildrenList = LIST_ENTRY()
self.SiblingsList = LIST_ENTRY()
self.ResourceList = LIST_ENTRY()
self.Root = v_ptr32()
self.ThreadEntry = v_ptr32()
self.u1 = _unnamed_36349()
self.ChildrenCount = v_uint32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
self.ParentStackTrace = vstruct.VArray([ v_ptr32() for i in xrange(8) ])
class PROC_IDLE_STATE_BUCKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalTime = v_uint64()
self.MinTime = v_uint64()
self.MaxTime = v_uint64()
self.Count = v_uint32()
self._pad0020 = v_bytes(size=4)
class _unnamed_25485(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
class tagSWITCH_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Attribute = tagSWITCH_CONTEXT_ATTRIBUTE()
self.Data = tagSWITCH_CONTEXT_DATA()
class _unnamed_28657(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Long = v_uint64()
class VACB_ARRAY_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VacbArrayIndex = v_uint32()
self.MappingCount = v_uint32()
self.HighestMappedIndex = v_uint32()
self.Reserved = v_uint32()
class HEAP_STOP_ON_TAG(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HeapAndTagIndex = v_uint32()
class KPCR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB()
self.SelfPcr = v_ptr32()
self.Prcb = v_ptr32()
self.Irql = v_uint8()
self._pad0028 = v_bytes(size=3)
self.IRR = v_uint32()
self.IrrActive = v_uint32()
self.IDR = v_uint32()
self.KdVersionBlock = v_ptr32()
self.IDT = v_ptr32()
self.GDT = v_ptr32()
self.TSS = v_ptr32()
self.MajorVersion = v_uint16()
self.MinorVersion = v_uint16()
self.SetMember = v_uint32()
self.StallScaleFactor = v_uint32()
self.SpareUnused = v_uint8()
self.Number = v_uint8()
self.Spare0 = v_uint8()
self.SecondLevelCacheAssociativity = v_uint8()
self.VdmAlert = v_uint32()
self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(14) ])
self.SecondLevelCacheSize = v_uint32()
self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
self.InterruptMode = v_uint32()
self.Spare1 = v_uint8()
self._pad00dc = v_bytes(size=3)
self.KernelReserved2 = vstruct.VArray([ v_uint32() for i in xrange(17) ])
self.PrcbData = KPRCB()
class RTL_RB_TREE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Root = v_ptr32()
self.Min = v_ptr32()
class IMAGE_FILE_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Machine = v_uint16()
self.NumberOfSections = v_uint16()
self.TimeDateStamp = v_uint32()
self.PointerToSymbolTable = v_uint32()
self.NumberOfSymbols = v_uint32()
self.SizeOfOptionalHeader = v_uint16()
self.Characteristics = v_uint16()
class DBGKD_SET_SPECIAL_CALL64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpecialCall = v_uint64()
class CM_KEY_INDEX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Count = v_uint16()
self.List = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class FILE_STANDARD_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocationSize = LARGE_INTEGER()
self.EndOfFile = LARGE_INTEGER()
self.NumberOfLinks = v_uint32()
self.DeletePending = v_uint8()
self.Directory = v_uint8()
self._pad0018 = v_bytes(size=2)
class RELATION_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.TagCount = v_uint32()
self.FirstLevel = v_uint32()
self.MaxLevel = v_uint32()
self.Entries = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class ETWP_NOTIFICATION_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationType = v_uint32()
self.NotificationSize = v_uint32()
self.RefCount = v_uint32()
self.ReplyRequested = v_uint8()
self._pad0010 = v_bytes(size=3)
self.ReplyIndex = v_uint32()
self.ReplyCount = v_uint32()
self.ReplyHandle = v_uint64()
self.TargetPID = v_uint32()
self.SourcePID = v_uint32()
self.DestinationGuid = GUID()
self.SourceGuid = GUID()
class PI_RESOURCE_ARBITER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceArbiterList = LIST_ENTRY()
self.ResourceType = v_uint8()
self._pad000c = v_bytes(size=3)
self.ArbiterInterface = v_ptr32()
self.DeviceNode = v_ptr32()
self.ResourceList = LIST_ENTRY()
self.BestResourceList = LIST_ENTRY()
self.BestConfig = LIST_ENTRY()
self.ActiveArbiterList = LIST_ENTRY()
self.State = v_uint8()
self.ResourcesChanged = v_uint8()
self._pad0038 = v_bytes(size=2)
class AMD64_DBGKD_CONTROL_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TraceFlag = v_uint32()
self.Dr7 = v_uint64()
self.CurrentSymbolStart = v_uint64()
self.CurrentSymbolEnd = v_uint64()
class _unnamed_27730(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MasterIrp = v_ptr32()
class SYSPTES_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = LIST_ENTRY()
self.Count = v_uint32()
self.NumberOfEntries = v_uint32()
self.NumberOfEntriesPeak = v_uint32()
class _unnamed_25441(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class DBGKD_READ_WRITE_IO_EXTENDED32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSize = v_uint32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.AddressSpace = v_uint32()
self.IoAddress = v_uint32()
self.DataValue = v_uint32()
class _unnamed_35222(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProgrammedTime = v_uint64()
self.TimerInfo = v_ptr32()
self._pad0010 = v_bytes(size=4)
class PEB_LDR_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Initialized = v_uint8()
self._pad0008 = v_bytes(size=3)
self.SsHandle = v_ptr32()
self.InLoadOrderModuleList = LIST_ENTRY()
self.InMemoryOrderModuleList = LIST_ENTRY()
self.InInitializationOrderModuleList = LIST_ENTRY()
self.EntryInProgress = v_ptr32()
self.ShutdownInProgress = v_uint8()
self._pad002c = v_bytes(size=3)
self.ShutdownThreadId = v_ptr32()
class DBGKD_WRITE_BREAKPOINT64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakPointAddress = v_uint64()
self.BreakPointHandle = v_uint32()
self._pad0010 = v_bytes(size=4)
class FSRTL_ADVANCED_FCB_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NodeTypeCode = v_uint16()
self.NodeByteSize = v_uint16()
self.Flags = v_uint8()
self.IsFastIoPossible = v_uint8()
self.Flags2 = v_uint8()
self.Reserved = v_uint8()
self.Resource = v_ptr32()
self.PagingIoResource = v_ptr32()
self.AllocationSize = LARGE_INTEGER()
self.FileSize = LARGE_INTEGER()
self.ValidDataLength = LARGE_INTEGER()
self.FastMutex = v_ptr32()
self.FilterContexts = LIST_ENTRY()
self.PushLock = EX_PUSH_LOCK()
self.FileContextSupportPointer = v_ptr32()
self.Oplock = v_ptr32()
class ARBITER_INTERFACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.Context = v_ptr32()
self.InterfaceReference = v_ptr32()
self.InterfaceDereference = v_ptr32()
self.ArbiterHandler = v_ptr32()
self.Flags = v_uint32()
class DIAGNOSTIC_BUFFER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.CallerType = v_uint32()
self.ProcessImageNameOffset = v_uint32()
self.ProcessId = v_uint32()
self.ServiceTag = v_uint32()
self.ReasonOffset = v_uint32()
class POOL_TRACKER_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Key = v_uint32()
self.NonPagedAllocs = v_uint32()
self.NonPagedFrees = v_uint32()
self.NonPagedBytes = v_uint32()
self.PagedAllocs = v_uint32()
self.PagedFrees = v_uint32()
self.PagedBytes = v_uint32()
class _unnamed_34312(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllSharedExportThunks = VF_TARGET_ALL_SHARED_EXPORT_THUNKS()
class PCW_BUFFER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_34731(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PhysicalAddress = v_uint32()
class SECURITY_SUBJECT_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClientToken = v_ptr32()
self.ImpersonationLevel = v_uint32()
self.PrimaryToken = v_ptr32()
self.ProcessAuditId = v_ptr32()
class POP_IO_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DumpMdl = v_ptr32()
self.IoStatus = v_uint32()
self.IoStartCount = v_uint64()
self.IoBytesCompleted = v_uint64()
self.IoBytesInProgress = v_uint64()
self.RequestSize = v_uint64()
self.IoLocation = LARGE_INTEGER()
self.FileOffset = v_uint64()
self.Buffer = v_ptr32()
self.AsyncCapable = v_uint8()
self._pad0040 = v_bytes(size=3)
self.BytesToRead = v_uint64()
self.Pages = v_uint32()
self._pad0050 = v_bytes(size=4)
class HIVE_WAIT_PACKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WakeEvent = KEVENT()
self.Status = v_uint32()
self.Next = v_ptr32()
self.PrimaryFileWritten = v_uint8()
self._pad001c = v_bytes(size=3)
class KALPC_REGION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RegionListEntry = LIST_ENTRY()
self.Section = v_ptr32()
self.Offset = v_uint32()
self.Size = v_uint32()
self.ViewSize = v_uint32()
self.u1 = _unnamed_30902()
self.NumberOfViews = v_uint32()
self.ViewListHead = LIST_ENTRY()
self.ReadOnlyView = v_ptr32()
self.ReadWriteView = v_ptr32()
class VF_TRACKER_STAMP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Thread = v_ptr32()
self.Flags = v_uint8()
self.OldIrql = v_uint8()
self.NewIrql = v_uint8()
self.Processor = v_uint8()
class POP_FX_COMPONENT_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_uint32()
self.Value2 = v_uint32()
class KERNEL_STACK_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StackBase = v_uint32()
self.StackLimit = v_uint32()
self.KernelStack = v_uint32()
self.InitialStack = v_uint32()
class ALPC_MESSAGE_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatedAttributes = v_uint32()
self.ValidAttributes = v_uint32()
class POP_THERMAL_ZONE_METRICS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MetricsResource = ERESOURCE()
self.ActiveCount = v_uint32()
self.PassiveCount = v_uint32()
self.LastActiveStartTick = LARGE_INTEGER()
self.AverageActiveTime = LARGE_INTEGER()
self.LastPassiveStartTick = LARGE_INTEGER()
self.AveragePassiveTime = LARGE_INTEGER()
self.StartTickSinceLastReset = LARGE_INTEGER()
class PCW_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = v_ptr32()
self.Size = v_uint32()
class DEVICE_RELATIONS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.Objects = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class POOL_BLOCK_HEAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = POOL_HEADER()
self.List = LIST_ENTRY()
class TRACE_ENABLE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IsEnabled = v_uint32()
self.Level = v_uint8()
self.Reserved1 = v_uint8()
self.LoggerId = v_uint16()
self.EnableProperty = v_uint32()
self.Reserved2 = v_uint32()
self.MatchAnyKeyword = v_uint64()
self.MatchAllKeyword = v_uint64()
class _unnamed_34389(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationStructure = v_ptr32()
self.DeviceId = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0008 = v_bytes(size=2)
class MMSUBSECTION_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SubsectionAccessed = v_uint16()
self.SubsectionStatic = v_uint16()
class INTERFACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.Context = v_ptr32()
self.InterfaceReference = v_ptr32()
self.InterfaceDereference = v_ptr32()
class SYSTEM_POWER_LEVEL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Enable = v_uint8()
self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.BatteryLevel = v_uint32()
self.PowerPolicy = POWER_ACTION_POLICY()
self.MinSystemState = v_uint32()
class _unnamed_34387(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceId = vstruct.VArray([ v_uint16() for i in xrange(1) ])
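# WMI_LOGGER_CONTEXT: per-session ETW logger state (buffer accounting, log file
# names, consumer list) for one trace session.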
class WMI_LOGGER_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LoggerId = v_uint32()
self.BufferSize = v_uint32()
self.MaximumEventSize = v_uint32()
self.LoggerMode = v_uint32()
self.AcceptNewEvents = v_uint32()
self.EventMarker = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.ErrorMarker = v_uint32()
self.SizeMask = v_uint32()
self.GetCpuClock = v_ptr32()
self.LoggerThread = v_ptr32()
self.LoggerStatus = v_uint32()
self.FailureReason = v_uint32()
self.BufferQueue = ETW_BUFFER_QUEUE()
self.OverflowQueue = ETW_BUFFER_QUEUE()
self.GlobalList = LIST_ENTRY()
self.ProviderBinaryList = LIST_ENTRY()
self.BatchedBufferList = v_ptr32()
self.LoggerName = UNICODE_STRING()
self.LogFileName = UNICODE_STRING()
self.LogFilePattern = UNICODE_STRING()
self.NewLogFileName = UNICODE_STRING()
self.ClockType = v_uint32()
self.LastFlushedBuffer = v_uint32()
self.FlushTimer = v_uint32()
self.FlushThreshold = v_uint32()
self._pad0090 = v_bytes(size=4)
self.ByteOffset = LARGE_INTEGER()
self.MinimumBuffers = v_uint32()
self.BuffersAvailable = v_uint32()
self.NumberOfBuffers = v_uint32()
self.MaximumBuffers = v_uint32()
self.EventsLost = v_uint32()
self.BuffersWritten = v_uint32()
self.LogBuffersLost = v_uint32()
self.RealTimeBuffersDelivered = v_uint32()
self.RealTimeBuffersLost = v_uint32()
self.SequencePtr = v_ptr32()
self.LocalSequence = v_uint32()
self.InstanceGuid = GUID()
self.MaximumFileSize = v_uint32()
self.FileCounter = v_uint32()
self.PoolType = v_uint32()
self.ReferenceTime = ETW_REF_CLOCK()
self.CollectionOn = v_uint32()
self.ProviderInfoSize = v_uint32()
self.Consumers = LIST_ENTRY()
self.NumConsumers = v_uint32()
self.TransitionConsumer = v_ptr32()
self.RealtimeLogfileHandle = v_ptr32()
self.RealtimeLogfileName = UNICODE_STRING()
self._pad0118 = v_bytes(size=4)
self.RealtimeWriteOffset = LARGE_INTEGER()
self.RealtimeReadOffset = LARGE_INTEGER()
self.RealtimeLogfileSize = LARGE_INTEGER()
self.RealtimeLogfileUsage = v_uint64()
self.RealtimeMaximumFileSize = v_uint64()
self.RealtimeBuffersSaved = v_uint32()
self._pad0148 = v_bytes(size=4)
self.RealtimeReferenceTime = ETW_REF_CLOCK()
self.NewRTEventsLost = v_uint32()
self.LoggerEvent = KEVENT()
self.FlushEvent = KEVENT()
self._pad0180 = v_bytes(size=4)
self.FlushTimeOutTimer = KTIMER()
self.LoggerDpc = KDPC()
self.LoggerMutex = KMUTANT()
self.LoggerLock = EX_PUSH_LOCK()
self.BufferListSpinLock = v_uint32()
self.ClientSecurityContext = SECURITY_CLIENT_CONTEXT()
self.SecurityDescriptor = EX_FAST_REF()
self.StartTime = LARGE_INTEGER()
self.LogFileHandle = v_ptr32()
self._pad0240 = v_bytes(size=4)
self.BufferSequenceNumber = v_uint64()
self.Flags = v_uint32()
self.RequestFlag = v_uint32()
self.HookIdMap = RTL_BITMAP()
self.StackCache = v_ptr32()
self.PmcData = v_ptr32()
self.WinRtProviderBinaryList = LIST_ENTRY()
self.ScratchArray = v_ptr32()
self._pad0270 = v_bytes(size=4)
class THREAD_PERFORMANCE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.ProcessorNumber = PROCESSOR_NUMBER()
self.ContextSwitches = v_uint32()
self.HwCountersCount = v_uint32()
self.UpdateCount = v_uint64()
self.WaitReasonBitMap = v_uint64()
self.HardwareCounters = v_uint64()
self.CycleTime = COUNTER_READING()
self.HwCounters = vstruct.VArray([ COUNTER_READING() for i in xrange(16) ])
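# IO_STACK_LOCATION: one stack location of an IRP; MajorFunction/MinorFunction
# select the request type and Parameters is the per-request parameter union.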
class IO_STACK_LOCATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorFunction = v_uint8()
self.MinorFunction = v_uint8()
self.Flags = v_uint8()
self.Control = v_uint8()
self.Parameters = _unnamed_27770()
self.DeviceObject = v_ptr32()
self.FileObject = v_ptr32()
self.CompletionRoutine = v_ptr32()
self.Context = v_ptr32()
class DBGKD_READ_WRITE_MSR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Msr = v_uint32()
self.DataValueLow = v_uint32()
self.DataValueHigh = v_uint32()
class ARBITER_QUERY_CONFLICT_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PhysicalDeviceObject = v_ptr32()
self.ConflictingResource = v_ptr32()
self.ConflictCount = v_ptr32()
self.Conflicts = v_ptr32()
class IMAGE_DATA_DIRECTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VirtualAddress = v_uint32()
self.Size = v_uint32()
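# FILE_OBJECT: kernel object backing an open file; DeviceObject, FsContext and
# FileName identify the owning device and the file itself.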
class FILE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.Vpb = v_ptr32()
self.FsContext = v_ptr32()
self.FsContext2 = v_ptr32()
self.SectionObjectPointer = v_ptr32()
self.PrivateCacheMap = v_ptr32()
self.FinalStatus = v_uint32()
self.RelatedFileObject = v_ptr32()
self.LockOperation = v_uint8()
self.DeletePending = v_uint8()
self.ReadAccess = v_uint8()
self.WriteAccess = v_uint8()
self.DeleteAccess = v_uint8()
self.SharedRead = v_uint8()
self.SharedWrite = v_uint8()
self.SharedDelete = v_uint8()
self.Flags = v_uint32()
self.FileName = UNICODE_STRING()
self.CurrentByteOffset = LARGE_INTEGER()
self.Waiters = v_uint32()
self.Busy = v_uint32()
self.LastLock = v_ptr32()
self.Lock = KEVENT()
self.Event = KEVENT()
self.CompletionContext = v_ptr32()
self.IrpListLock = v_uint32()
self.IrpList = LIST_ENTRY()
self.FileObjectExtension = v_ptr32()
class PPM_IDLE_STATES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ForceIdle = v_uint8()
self.EstimateIdleDuration = v_uint8()
self.ExitLatencyTraceEnabled = v_uint8()
self._pad0004 = v_bytes(size=1)
self.ExitLatencyCountdown = v_uint32()
self.TargetState = v_uint32()
self.ActualState = v_uint32()
self.ActualPlatformState = v_uint32()
self.OldState = v_uint32()
self.OverrideIndex = v_uint32()
self.PlatformIdleCount = v_uint32()
self.ProcessorIdleCount = v_uint32()
self.Type = v_uint32()
self.ReasonFlags = v_uint32()
self._pad0030 = v_bytes(size=4)
self.InitiateWakeStamp = v_uint64()
self.PreviousStatus = v_uint32()
self.PrimaryProcessorMask = KAFFINITY_EX()
self.SecondaryProcessorMask = KAFFINITY_EX()
self.IdlePrepare = v_ptr32()
self.IdleExecute = v_ptr32()
self.IdleComplete = v_ptr32()
self.IdleCancel = v_ptr32()
self.IdleIsHalted = v_ptr32()
self.IdleInitiateWake = v_ptr32()
self._pad0070 = v_bytes(size=4)
self.PrepareInfo = PROCESSOR_IDLE_PREPARE_INFO()
self.State = vstruct.VArray([ PPM_IDLE_STATE() for i in xrange(1) ])
class MMPAGING_FILE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.MaximumSize = v_uint32()
self.MinimumSize = v_uint32()
self.FreeSpace = v_uint32()
self.PeakUsage = v_uint32()
self.HighestPage = v_uint32()
self.FreeReservationSpace = v_uint32()
self.LargestReserveCluster = v_uint32()
self.File = v_ptr32()
self.Entry = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.PageFileName = UNICODE_STRING()
self.Bitmaps = v_ptr32()
self.AllocationBitmapHint = v_uint32()
self.ReservationBitmapHint = v_uint32()
self.LargestNonReservedClusterSize = v_uint32()
self.RefreshClusterSize = v_uint32()
self.LastRefreshClusterSize = v_uint32()
self.ReservedClusterSizeAggregate = v_uint32()
self.ToBeEvictedCount = v_uint32()
self.PageFileNumber = v_uint16()
self.AdriftMdls = v_uint8()
self.Spare2 = v_uint8()
self.FileHandle = v_ptr32()
self.Lock = v_uint32()
self.LockOwner = v_ptr32()
class _unnamed_35986(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MissedEtwRegistration = v_uint32()
class IOV_IRP_TRACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Irp = v_ptr32()
self.Thread = v_ptr32()
self.KernelApcDisable = v_uint16()
self.SpecialApcDisable = v_uint16()
self.Irql = v_uint8()
self._pad0010 = v_bytes(size=3)
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(12) ])
class WHEA_NOTIFICATION_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PollIntervalRW = v_uint16()
class LDR_SERVICE_TAG_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.ServiceTag = v_uint32()
class _unnamed_34404(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerSettingGuid = GUID()
self.Flags = v_uint32()
self.SessionId = v_uint32()
self.DataLength = v_uint32()
self.Data = vstruct.VArray([ v_uint8() for i in xrange(1) ])
self._pad0020 = v_bytes(size=3)
class SECTION_IMAGE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TransferAddress = v_ptr32()
self.ZeroBits = v_uint32()
self.MaximumStackSize = v_uint32()
self.CommittedStackSize = v_uint32()
self.SubSystemType = v_uint32()
self.SubSystemMinorVersion = v_uint16()
self.SubSystemMajorVersion = v_uint16()
self.GpValue = v_uint32()
self.ImageCharacteristics = v_uint16()
self.DllCharacteristics = v_uint16()
self.Machine = v_uint16()
self.ImageContainsCode = v_uint8()
self.ImageFlags = v_uint8()
self.LoaderFlags = v_uint32()
self.ImageFileSize = v_uint32()
self.CheckSum = v_uint32()
class KENLISTMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.cookie = v_uint32()
self.NamespaceLink = KTMOBJECT_NAMESPACE_LINK()
self.EnlistmentId = GUID()
self.Mutex = KMUTANT()
self.NextSameTx = LIST_ENTRY()
self.NextSameRm = LIST_ENTRY()
self.ResourceManager = v_ptr32()
self.Transaction = v_ptr32()
self.State = v_uint32()
self.Flags = v_uint32()
self.NotificationMask = v_uint32()
self.Key = v_ptr32()
self.KeyRefCount = v_uint32()
self.RecoveryInformation = v_ptr32()
self.RecoveryInformationLength = v_uint32()
self.DynamicNameInformation = v_ptr32()
self.DynamicNameInformationLength = v_uint32()
self.FinalNotification = v_ptr32()
self.SupSubEnlistment = v_ptr32()
self.SupSubEnlHandle = v_ptr32()
self.SubordinateTxHandle = v_ptr32()
self.CrmEnlistmentEnId = GUID()
self.CrmEnlistmentTmId = GUID()
self.CrmEnlistmentRmId = GUID()
self.NextHistory = v_uint32()
self.History = vstruct.VArray([ KENLISTMENT_HISTORY() for i in xrange(20) ])
class STRING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self.Buffer = v_ptr32()
class ERESOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemResourcesList = LIST_ENTRY()
self.OwnerTable = v_ptr32()
self.ActiveCount = v_uint16()
self.Flag = v_uint16()
self.SharedWaiters = v_ptr32()
self.ExclusiveWaiters = v_ptr32()
self.OwnerEntry = OWNER_ENTRY()
self.ActiveEntries = v_uint32()
self.ContentionCount = v_uint32()
self.NumberOfSharedWaiters = v_uint32()
self.NumberOfExclusiveWaiters = v_uint32()
self.Address = v_ptr32()
self.SpinLock = v_uint32()
class SUBSECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlArea = v_ptr32()
self.SubsectionBase = v_ptr32()
self.NextSubsection = v_ptr32()
self.PtesInSubsection = v_uint32()
self.UnusedPtes = v_uint32()
self.u = _unnamed_33507()
self.StartingSector = v_uint32()
self.NumberOfFullSectors = v_uint32()
class CM_WORKITEM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.Private = v_uint32()
self.WorkerRoutine = v_ptr32()
self.Parameter = v_ptr32()
class DBGKD_SET_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFlags = v_uint32()
class LPCP_MESSAGE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = LIST_ENTRY()
self.SenderPort = v_ptr32()
self.RepliedToThread = v_ptr32()
self.PortContext = v_ptr32()
self._pad0018 = v_bytes(size=4)
self.Request = PORT_MESSAGE()
class _unnamed_34794(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdleTime = v_uint32()
self.NonIdleTime = v_uint32()
class RTL_ATOM_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashLink = v_ptr32()
self.HandleIndex = v_uint16()
self.Atom = v_uint16()
self.Reference = RTL_ATOM_TABLE_REFERENCE()
self.NameLength = v_uint8()
self._pad001a = v_bytes(size=1)
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class _unnamed_36524(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.idxRecord = v_uint32()
self.cidContainer = v_uint32()
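# TEB32: explicit 32-bit TEB layout with pointers stored as raw v_uint32 values;
# compare the native TEB definition later in this module.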
class TEB32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB32()
self.EnvironmentPointer = v_uint32()
self.ClientId = CLIENT_ID32()
self.ActiveRpcHandle = v_uint32()
self.ThreadLocalStoragePointer = v_uint32()
self.ProcessEnvironmentBlock = v_uint32()
self.LastErrorValue = v_uint32()
self.CountOfOwnedCriticalSections = v_uint32()
self.CsrClientThread = v_uint32()
self.Win32ThreadInfo = v_uint32()
self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
self.WOW32Reserved = v_uint32()
self.CurrentLocale = v_uint32()
self.FpSoftwareStatusRegister = v_uint32()
self.SystemReserved1 = vstruct.VArray([ v_uint32() for i in xrange(54) ])
self.ExceptionCode = v_uint32()
self.ActivationContextStackPointer = v_uint32()
self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(36) ])
self.TxFsContext = v_uint32()
self.GdiTebBatch = GDI_TEB_BATCH32()
self.RealClientId = CLIENT_ID32()
self.GdiCachedProcessHandle = v_uint32()
self.GdiClientPID = v_uint32()
self.GdiClientTID = v_uint32()
self.GdiThreadLocalInfo = v_uint32()
self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in xrange(62) ])
self.glDispatchTable = vstruct.VArray([ v_uint32() for i in xrange(233) ])
self.glReserved1 = vstruct.VArray([ v_uint32() for i in xrange(29) ])
self.glReserved2 = v_uint32()
self.glSectionInfo = v_uint32()
self.glSection = v_uint32()
self.glTable = v_uint32()
self.glCurrentRC = v_uint32()
self.glContext = v_uint32()
self.LastStatusValue = v_uint32()
self.StaticUnicodeString = STRING32()
self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
self._pad0e0c = v_bytes(size=2)
self.DeallocationStack = v_uint32()
self.TlsSlots = vstruct.VArray([ v_uint32() for i in xrange(64) ])
self.TlsLinks = LIST_ENTRY32()
self.Vdm = v_uint32()
self.ReservedForNtRpc = v_uint32()
self.DbgSsReserved = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.HardErrorMode = v_uint32()
self.Instrumentation = vstruct.VArray([ v_uint32() for i in xrange(9) ])
self.ActivityId = GUID()
self.SubProcessTag = v_uint32()
self.PerflibData = v_uint32()
self.EtwTraceData = v_uint32()
self.WinSockData = v_uint32()
self.GdiBatchCount = v_uint32()
self.CurrentIdealProcessor = PROCESSOR_NUMBER()
self.GuaranteedStackBytes = v_uint32()
self.ReservedForPerf = v_uint32()
self.ReservedForOle = v_uint32()
self.WaitingOnLoaderLock = v_uint32()
self.SavedPriorityState = v_uint32()
self.ReservedForCodeCoverage = v_uint32()
self.ThreadPoolData = v_uint32()
self.TlsExpansionSlots = v_uint32()
self.MuiGeneration = v_uint32()
self.IsImpersonating = v_uint32()
self.NlsCache = v_uint32()
self.pShimData = v_uint32()
self.HeapVirtualAffinity = v_uint16()
self.LowFragHeapDataSlot = v_uint16()
self.CurrentTransactionHandle = v_uint32()
self.ActiveFrame = v_uint32()
self.FlsData = v_uint32()
self.PreferredLanguages = v_uint32()
self.UserPrefLanguages = v_uint32()
self.MergedPrefLanguages = v_uint32()
self.MuiImpersonation = v_uint32()
self.CrossTebFlags = v_uint16()
self.SameTebFlags = v_uint16()
self.TxnScopeEnterCallback = v_uint32()
self.TxnScopeExitCallback = v_uint32()
self.TxnScopeContext = v_uint32()
self.LockCount = v_uint32()
self.SpareUlong0 = v_uint32()
self.ResourceRetValue = v_uint32()
self.ReservedForWdf = v_uint32()
class PROCESSOR_IDLE_DEPENDENCY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Processor = PROCESSOR_NUMBER()
self.ExpectedState = v_uint8()
self._pad0006 = v_bytes(size=1)
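# PEB: Process Environment Block; per-process user-mode data such as
# ImageBaseAddress, Ldr (loaded-module lists) and ProcessParameters.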
class PEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InheritedAddressSpace = v_uint8()
self.ReadImageFileExecOptions = v_uint8()
self.BeingDebugged = v_uint8()
self.BitField = v_uint8()
self.Mutant = v_ptr32()
self.ImageBaseAddress = v_ptr32()
self.Ldr = v_ptr32()
self.ProcessParameters = v_ptr32()
self.SubSystemData = v_ptr32()
self.ProcessHeap = v_ptr32()
self.FastPebLock = v_ptr32()
self.AtlThunkSListPtr = v_ptr32()
self.IFEOKey = v_ptr32()
self.CrossProcessFlags = v_uint32()
self.KernelCallbackTable = v_ptr32()
self.SystemReserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.AtlThunkSListPtr32 = v_uint32()
self.ApiSetMap = v_ptr32()
self.TlsExpansionCounter = v_uint32()
self.TlsBitmap = v_ptr32()
self.TlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.ReadOnlySharedMemoryBase = v_ptr32()
self.HotpatchInformation = v_ptr32()
self.ReadOnlyStaticServerData = v_ptr32()
self.AnsiCodePageData = v_ptr32()
self.OemCodePageData = v_ptr32()
self.UnicodeCaseTableData = v_ptr32()
self.NumberOfProcessors = v_uint32()
self.NtGlobalFlag = v_uint32()
self._pad0070 = v_bytes(size=4)
self.CriticalSectionTimeout = LARGE_INTEGER()
self.HeapSegmentReserve = v_uint32()
self.HeapSegmentCommit = v_uint32()
self.HeapDeCommitTotalFreeThreshold = v_uint32()
self.HeapDeCommitFreeBlockThreshold = v_uint32()
self.NumberOfHeaps = v_uint32()
self.MaximumNumberOfHeaps = v_uint32()
self.ProcessHeaps = v_ptr32()
self.GdiSharedHandleTable = v_ptr32()
self.ProcessStarterHelper = v_ptr32()
self.GdiDCAttributeList = v_uint32()
self.LoaderLock = v_ptr32()
self.OSMajorVersion = v_uint32()
self.OSMinorVersion = v_uint32()
self.OSBuildNumber = v_uint16()
self.OSCSDVersion = v_uint16()
self.OSPlatformId = v_uint32()
self.ImageSubsystem = v_uint32()
self.ImageSubsystemMajorVersion = v_uint32()
self.ImageSubsystemMinorVersion = v_uint32()
self.ActiveProcessAffinityMask = v_uint32()
self.GdiHandleBuffer = vstruct.VArray([ v_uint32() for i in xrange(34) ])
self.PostProcessInitRoutine = v_ptr32()
self.TlsExpansionBitmap = v_ptr32()
self.TlsExpansionBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(32) ])
self.SessionId = v_uint32()
self.AppCompatFlags = ULARGE_INTEGER()
self.AppCompatFlagsUser = ULARGE_INTEGER()
self.pShimData = v_ptr32()
self.AppCompatInfo = v_ptr32()
self.CSDVersion = UNICODE_STRING()
self.ActivationContextData = v_ptr32()
self.ProcessAssemblyStorageMap = v_ptr32()
self.SystemDefaultActivationContextData = v_ptr32()
self.SystemAssemblyStorageMap = v_ptr32()
self.MinimumStackCommit = v_uint32()
self.FlsCallback = v_ptr32()
self.FlsListHead = LIST_ENTRY()
self.FlsBitmap = v_ptr32()
self.FlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(4) ])
self.FlsHighIndex = v_uint32()
self.WerRegistrationData = v_ptr32()
self.WerShipAssertPtr = v_ptr32()
self.pUnused = v_ptr32()
self.pImageHeaderHash = v_ptr32()
self.TracingFlags = v_uint32()
self._pad0248 = v_bytes(size=4)
self.CsrServerReadOnlySharedMemoryBase = v_uint64()
class WHEA_XPF_CMC_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.NumberOfBanks = v_uint8()
self.Reserved = v_uint32()
self.Notify = WHEA_NOTIFICATION_DESCRIPTOR()
self.Banks = vstruct.VArray([ WHEA_XPF_MC_BANK_DESCRIPTOR() for i in xrange(32) ])
class KSCB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GenerationCycles = v_uint64()
self.UnderQuotaCycleTarget = v_uint64()
self.RankCycleTarget = v_uint64()
self.LongTermCycles = v_uint64()
self.LastReportedCycles = v_uint64()
self.OverQuotaHistory = v_uint64()
self.PerProcessorList = LIST_ENTRY()
self.QueueNode = RTL_BALANCED_NODE()
self.Inserted = v_uint8()
self.Spare2 = v_uint8()
self.ReadySummary = v_uint16()
self.Rank = v_uint32()
self.ReadyListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(16) ])
self._pad00d0 = v_bytes(size=4)
class DOCK_INTERFACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.Context = v_ptr32()
self.InterfaceReference = v_ptr32()
self.InterfaceDereference = v_ptr32()
self.ProfileDepartureSetMode = v_ptr32()
self.ProfileDepartureUpdate = v_ptr32()
class WHEA_AER_ROOTPORT_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
self.BusNumber = v_uint32()
self.Slot = WHEA_PCI_SLOT_NUMBER()
self.DeviceControl = v_uint16()
self.Flags = AER_ROOTPORT_DESCRIPTOR_FLAGS()
self.UncorrectableErrorMask = v_uint32()
self.UncorrectableErrorSeverity = v_uint32()
self.CorrectableErrorMask = v_uint32()
self.AdvancedCapsAndControl = v_uint32()
self.RootErrorCommand = v_uint32()
class _unnamed_30920(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_30922()
class RTL_BALANCED_LINKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Parent = v_ptr32()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
self.Balance = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
class MI_LARGEPAGE_MEMORY_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = LIST_ENTRY()
self.ColoredPageInfoBase = v_ptr32()
self.PagesNeedZeroing = v_uint32()
class PROCESSOR_PROFILE_CONTROL_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PebsDsSaveArea = PEBS_DS_SAVE_AREA()
class _unnamed_30479(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reason = v_uint32()
class KENLISTMENT_HISTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Notification = v_uint32()
self.NewState = v_uint32()
class XSTATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint64()
self.Length = v_uint32()
self.Reserved1 = v_uint32()
self.Area = v_ptr32()
self.Reserved2 = v_uint32()
self.Buffer = v_ptr32()
self.Reserved3 = v_uint32()
class RSDS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Guid = GUID()
self.Age = v_uint32()
self.PdbName = vstruct.VArray([ v_uint8() for i in xrange(1) ])
self._pad001c = v_bytes(size=3)
class OBJECT_DIRECTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashBuckets = vstruct.VArray([ v_ptr32() for i in xrange(37) ])
self.Lock = EX_PUSH_LOCK()
self.DeviceMap = v_ptr32()
self.SessionId = v_uint32()
self.NamespaceEntry = v_ptr32()
self.Flags = v_uint32()
class _unnamed_35044(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags = v_uint32()
class _unnamed_35045(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags1 = v_uint32()
class AER_ROOTPORT_DESCRIPTOR_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UncorrectableErrorMaskRW = v_uint16()
class BLOB_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreatedObjects = v_uint32()
self.DeletedObjects = v_uint32()
class ETW_STACK_CACHE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class MI_ACTIVE_WSLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint32()
self.Blink = v_uint32()
class MMIO_TRACKER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.PageFrameIndex = v_uint32()
self.NumberOfPages = v_uint32()
self.BaseVa = v_ptr32()
self.Mdl = v_ptr32()
self.MdlPages = v_uint32()
self.StackTrace = vstruct.VArray([ v_ptr32() for i in xrange(6) ])
self.CacheInfo = vstruct.VArray([ IO_CACHE_INFO() for i in xrange(1) ])
self._pad0038 = v_bytes(size=3)
class XSAVE_AREA_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint64()
self.Reserved = vstruct.VArray([ v_uint64() for i in xrange(7) ])
class HEAP_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.SegmentSignature = v_uint32()
self.SegmentFlags = v_uint32()
self.SegmentListEntry = LIST_ENTRY()
self.Heap = v_ptr32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.SegmentAllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.UCRSegmentList = LIST_ENTRY()
class _unnamed_34384(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClassGuid = GUID()
self.SymbolicLinkName = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0014 = v_bytes(size=2)
class HANDLE_TABLE_FREE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FreeListLock = EX_PUSH_LOCK()
self.FirstFreeHandleEntry = v_ptr32()
self.LastFreeHandleEntry = v_ptr32()
self.HandleCount = v_uint32()
self.HighWaterMark = v_uint32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(8) ])
class WHEA_ERROR_RECORD_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Revision = WHEA_REVISION()
self.SignatureEnd = v_uint32()
self.SectionCount = v_uint16()
self.Severity = v_uint32()
self.ValidBits = WHEA_ERROR_RECORD_HEADER_VALIDBITS()
self.Length = v_uint32()
self.Timestamp = WHEA_TIMESTAMP()
self.PlatformId = GUID()
self.PartitionId = GUID()
self.CreatorId = GUID()
self.NotifyType = GUID()
self.RecordId = v_uint64()
self.Flags = WHEA_ERROR_RECORD_HEADER_FLAGS()
self.PersistenceInfo = WHEA_PERSISTENCE_INFO()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(12) ])
class SEP_LOWBOX_HANDLES_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.HashTable = v_ptr32()
class ETW_SYSTEMTIME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Year = v_uint16()
self.Month = v_uint16()
self.DayOfWeek = v_uint16()
self.Day = v_uint16()
self.Hour = v_uint16()
self.Minute = v_uint16()
self.Second = v_uint16()
self.Milliseconds = v_uint16()
class _unnamed_28805(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint32()
class _unnamed_28806(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Blink = v_uint32()
class _unnamed_30473(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FileObject = v_ptr32()
class _unnamed_34105(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinimumChannel = v_uint32()
self.MaximumChannel = v_uint32()
class _unnamed_34108(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RequestLine = v_uint32()
self.Reserved = v_uint32()
self.Channel = v_uint32()
self.TransferWidth = v_uint32()
class _unnamed_28808(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReferenceCount = v_uint16()
self.e1 = MMPFNENTRY()
class FLS_CALLBACK_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_36621(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Head = v_uint64()
class PPM_IDLE_SYNCHRONIZATION_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsLong = v_uint32()
class MMSECURE_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReadOnly = v_uint32()
class DBGKD_WRITE_MEMORY64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TargetBaseAddress = v_uint64()
self.TransferCount = v_uint32()
self.ActualBytesWritten = v_uint32()
class MI_TRIAGE_DUMP_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BadPageCount = v_uint32()
self.BadPagesDetected = v_uint32()
self.ZeroedPageSingleBitErrorsDetected = v_uint32()
self.ScrubPasses = v_uint32()
self.ScrubBadPagesFound = v_uint32()
self.FeatureBits = v_uint32()
self.TimeZoneId = v_uint32()
class OBJECT_HEADER_PADDING_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PaddingAmount = v_uint32()
class LIST_ENTRY64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint64()
self.Blink = v_uint64()
class VACB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseAddress = v_ptr32()
self.SharedCacheMap = v_ptr32()
self.Overlay = _unnamed_30413()
self.ArrayHead = v_ptr32()
self._pad0018 = v_bytes(size=4)
class EXHANDLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TagBits = v_uint32()
class WAIT_CONTEXT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitQueueEntry = KDEVICE_QUEUE_ENTRY()
self.DeviceRoutine = v_ptr32()
self.DeviceContext = v_ptr32()
self.NumberOfMapRegisters = v_uint32()
self.DeviceObject = v_ptr32()
self.CurrentIrp = v_ptr32()
self.BufferChainingDpc = v_ptr32()
class CM_KEY_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Flags = v_uint16()
self.LastWriteTime = LARGE_INTEGER()
self.AccessBits = v_uint32()
self.Parent = v_uint32()
self.SubKeyCounts = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.SubKeyLists = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.ValueList = CHILD_LIST()
self.Security = v_uint32()
self.Class = v_uint32()
self.MaxNameLen = v_uint32()
self.MaxClassLen = v_uint32()
self.MaxValueNameLen = v_uint32()
self.MaxValueDataLen = v_uint32()
self.WorkVar = v_uint32()
self.NameLength = v_uint16()
self.ClassLength = v_uint16()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0050 = v_bytes(size=2)
class CM_KEY_VALUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.NameLength = v_uint16()
self.DataLength = v_uint32()
self.Data = v_uint32()
self.Type = v_uint32()
self.Flags = v_uint16()
self.Spare = v_uint16()
self.Name = vstruct.VArray([ v_uint16() for i in xrange(1) ])
self._pad0018 = v_bytes(size=2)
class _unnamed_32535(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseMid = v_uint32()
class PNP_PROVIDER_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.ProviderType = v_uint8()
self.Satisfied = v_uint8()
self.Flags = v_uint16()
self.u = _unnamed_34574()
class ACTIVATION_CONTEXT_STACK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ActiveFrame = v_ptr32()
self.FrameListCache = LIST_ENTRY()
self.Flags = v_uint32()
self.NextCookieSequenceNumber = v_uint32()
self.StackId = v_uint32()
class MI_PAGING_FILE_SPACE_BITMAPS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RefCount = v_uint32()
self.AllocationBitmap = RTL_BITMAP()
self.ReservationBitmap = RTL_BITMAP()
self.EvictStoreBitmap = v_ptr32()
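# LDR_DATA_TABLE_ENTRY: one loaded-module record; DllBase, SizeOfImage and the
# two UNICODE_STRING names describe the mapped image, and the three In*OrderLinks
# fields chain it into the PEB_LDR_DATA lists.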
class LDR_DATA_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InLoadOrderLinks = LIST_ENTRY()
self.InMemoryOrderLinks = LIST_ENTRY()
self.InInitializationOrderLinks = LIST_ENTRY()
self.DllBase = v_ptr32()
self.EntryPoint = v_ptr32()
self.SizeOfImage = v_uint32()
self.FullDllName = UNICODE_STRING()
self.BaseDllName = UNICODE_STRING()
self.FlagGroup = vstruct.VArray([ v_uint8() for i in xrange(4) ])
self.ObsoleteLoadCount = v_uint16()
self.TlsIndex = v_uint16()
self.HashLinks = LIST_ENTRY()
self.TimeDateStamp = v_uint32()
self.EntryPointActivationContext = v_ptr32()
self.PatchInformation = v_ptr32()
self.DdagNode = v_ptr32()
self.NodeModuleLink = LIST_ENTRY()
self.SnapContext = v_ptr32()
self.ParentDllBase = v_ptr32()
self.SwitchBackContext = v_ptr32()
self.BaseAddressIndexNode = RTL_BALANCED_NODE()
self.MappingInfoIndexNode = RTL_BALANCED_NODE()
self.OriginalBase = v_uint32()
self._pad0088 = v_bytes(size=4)
self.LoadTime = LARGE_INTEGER()
self.BaseNameHashValue = v_uint32()
self.LoadReason = v_uint32()
class SEP_AUDIT_POLICY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AdtTokenPolicy = TOKEN_AUDIT_POLICY()
self.PolicySetStatus = v_uint8()
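# TEB: Thread Environment Block (native layout); holds the per-thread NT_TIB,
# ClientId, TLS slots and last-error state.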
class TEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB()
self.EnvironmentPointer = v_ptr32()
self.ClientId = CLIENT_ID()
self.ActiveRpcHandle = v_ptr32()
self.ThreadLocalStoragePointer = v_ptr32()
self.ProcessEnvironmentBlock = v_ptr32()
self.LastErrorValue = v_uint32()
self.CountOfOwnedCriticalSections = v_uint32()
self.CsrClientThread = v_ptr32()
self.Win32ThreadInfo = v_ptr32()
self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
self.WOW32Reserved = v_ptr32()
self.CurrentLocale = v_uint32()
self.FpSoftwareStatusRegister = v_uint32()
self.SystemReserved1 = vstruct.VArray([ v_ptr32() for i in xrange(54) ])
self.ExceptionCode = v_uint32()
self.ActivationContextStackPointer = v_ptr32()
self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(36) ])
self.TxFsContext = v_uint32()
self.GdiTebBatch = GDI_TEB_BATCH()
self.RealClientId = CLIENT_ID()
self.GdiCachedProcessHandle = v_ptr32()
self.GdiClientPID = v_uint32()
self.GdiClientTID = v_uint32()
self.GdiThreadLocalInfo = v_ptr32()
self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in xrange(62) ])
self.glDispatchTable = vstruct.VArray([ v_ptr32() for i in xrange(233) ])
self.glReserved1 = vstruct.VArray([ v_uint32() for i in xrange(29) ])
self.glReserved2 = v_ptr32()
self.glSectionInfo = v_ptr32()
self.glSection = v_ptr32()
self.glTable = v_ptr32()
self.glCurrentRC = v_ptr32()
self.glContext = v_ptr32()
self.LastStatusValue = v_uint32()
self.StaticUnicodeString = UNICODE_STRING()
self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
self._pad0e0c = v_bytes(size=2)
self.DeallocationStack = v_ptr32()
self.TlsSlots = vstruct.VArray([ v_ptr32() for i in xrange(64) ])
self.TlsLinks = LIST_ENTRY()
self.Vdm = v_ptr32()
self.ReservedForNtRpc = v_ptr32()
self.DbgSsReserved = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
self.HardErrorMode = v_uint32()
self.Instrumentation = vstruct.VArray([ v_ptr32() for i in xrange(9) ])
self.ActivityId = GUID()
self.SubProcessTag = v_ptr32()
self.PerflibData = v_ptr32()
self.EtwTraceData = v_ptr32()
self.WinSockData = v_ptr32()
self.GdiBatchCount = v_uint32()
self.CurrentIdealProcessor = PROCESSOR_NUMBER()
self.GuaranteedStackBytes = v_uint32()
self.ReservedForPerf = v_ptr32()
self.ReservedForOle = v_ptr32()
self.WaitingOnLoaderLock = v_uint32()
self.SavedPriorityState = v_ptr32()
self.ReservedForCodeCoverage = v_uint32()
self.ThreadPoolData = v_ptr32()
self.TlsExpansionSlots = v_ptr32()
self.MuiGeneration = v_uint32()
self.IsImpersonating = v_uint32()
self.NlsCache = v_ptr32()
self.pShimData = v_ptr32()
self.HeapVirtualAffinity = v_uint16()
self.LowFragHeapDataSlot = v_uint16()
self.CurrentTransactionHandle = v_ptr32()
self.ActiveFrame = v_ptr32()
self.FlsData = v_ptr32()
self.PreferredLanguages = v_ptr32()
self.UserPrefLanguages = v_ptr32()
self.MergedPrefLanguages = v_ptr32()
self.MuiImpersonation = v_uint32()
self.CrossTebFlags = v_uint16()
self.SameTebFlags = v_uint16()
self.TxnScopeEnterCallback = v_ptr32()
self.TxnScopeExitCallback = v_ptr32()
self.TxnScopeContext = v_ptr32()
self.LockCount = v_uint32()
self.SpareUlong0 = v_uint32()
self.ResourceRetValue = v_ptr32()
self.ReservedForWdf = v_ptr32()
class EX_RUNDOWN_REF(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
class POP_DEVICE_SYS_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IrpMinor = v_uint8()
self._pad0004 = v_bytes(size=3)
self.SystemState = v_uint32()
self.SpinLock = v_uint32()
self.Thread = v_ptr32()
self.AbortEvent = v_ptr32()
self.ReadySemaphore = v_ptr32()
self.FinishedSemaphore = v_ptr32()
self.Order = PO_DEVICE_NOTIFY_ORDER()
self.Pending = LIST_ENTRY()
self.Status = v_uint32()
self.FailedDevice = v_ptr32()
self.Waking = v_uint8()
self.Cancelled = v_uint8()
self.IgnoreErrors = v_uint8()
self.IgnoreNotImplemented = v_uint8()
self.TimeRefreshLockAcquired = v_uint8()
self._pad0104 = v_bytes(size=3)
class _unnamed_34035(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Polled = _unnamed_36885()
self._pad0018 = v_bytes(size=20)
class AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityAttributeCount = v_uint32()
self.SecurityAttributesList = LIST_ENTRY()
self.WorkingSecurityAttributeCount = v_uint32()
self.WorkingSecurityAttributesList = LIST_ENTRY()
class CM_BIG_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint16()
self.Count = v_uint16()
self.List = v_uint32()
class MMWSLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_28872()
class VI_POOL_PAGE_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextPage = v_ptr32()
self.VerifierEntry = v_ptr32()
self.Signature = v_uint32()
class PO_DIAG_STACK_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StackDepth = v_uint32()
self.Stack = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
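# IMAGE_DOS_HEADER: classic MZ header; e_lfanew is the file offset of the
# IMAGE_NT_HEADERS (PE) signature.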
class IMAGE_DOS_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.e_magic = v_uint16()
self.e_cblp = v_uint16()
self.e_cp = v_uint16()
self.e_crlc = v_uint16()
self.e_cparhdr = v_uint16()
self.e_minalloc = v_uint16()
self.e_maxalloc = v_uint16()
self.e_ss = v_uint16()
self.e_sp = v_uint16()
self.e_csum = v_uint16()
self.e_ip = v_uint16()
self.e_cs = v_uint16()
self.e_lfarlc = v_uint16()
self.e_ovno = v_uint16()
self.e_res = vstruct.VArray([ v_uint16() for i in xrange(4) ])
self.e_oemid = v_uint16()
self.e_oeminfo = v_uint16()
self.e_res2 = vstruct.VArray([ v_uint16() for i in xrange(10) ])
self.e_lfanew = v_uint32()
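# Minimal usage sketch (the buffer name and helper below are illustrative, not
# part of the generated definitions): vstruct structures populate themselves
# from a byte string via vsParse() and expose each field as an attribute.
def _example_read_pe_offset(image_bytes):
    # Assumes image_bytes starts with the raw MZ/PE file or mapped image.
    dos = IMAGE_DOS_HEADER()
    dos.vsParse(image_bytes)   # parse the DOS header from the start of the buffer
    return dos.e_lfanew        # file offset of the PE signature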
class WHEA_AER_BRIDGE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.Reserved = v_uint8()
self.BusNumber = v_uint32()
self.Slot = WHEA_PCI_SLOT_NUMBER()
self.DeviceControl = v_uint16()
self.Flags = AER_BRIDGE_DESCRIPTOR_FLAGS()
self.UncorrectableErrorMask = v_uint32()
self.UncorrectableErrorSeverity = v_uint32()
self.CorrectableErrorMask = v_uint32()
self.AdvancedCapsAndControl = v_uint32()
self.SecondaryUncorrectableErrorMask = v_uint32()
self.SecondaryUncorrectableErrorSev = v_uint32()
self.SecondaryCapsAndControl = v_uint32()
class DBGKD_FILL_MEMORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_uint64()
self.Length = v_uint32()
self.Flags = v_uint16()
self.PatternLength = v_uint16()
class CM_KEY_SECURITY_CACHE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cell = v_uint32()
self.CachedSecurity = v_ptr32()
class MM_AVL_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_34368()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
class SESSION_LOWBOX_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.SessionId = v_uint32()
self.LowboxMap = SEP_LOWBOX_NUMBER_MAPPING()
class _unnamed_34744(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CellData = CELL_DATA()
class EX_PUSH_LOCK_CACHE_AWARE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locks = vstruct.VArray([ v_ptr32() for i in xrange(32) ])
class ARBITER_ORDERING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint64()
self.End = v_uint64()
class MMVIEW(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PteOffset = v_uint64()
self.Entry = v_uint32()
self.u1 = MMVIEW_CONTROL_AREA()
self.ViewLinks = LIST_ENTRY()
self.SessionViewVa = v_ptr32()
self.SessionId = v_uint32()
self.SessionIdForGlobalSubsections = v_uint32()
self._pad0028 = v_bytes(size=4)
class _unnamed_34193(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
class ETW_GUID_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GuidList = LIST_ENTRY()
self.RefCount = v_uint32()
self.Guid = GUID()
self.RegListHead = LIST_ENTRY()
self.SecurityDescriptor = v_ptr32()
self.LastEnable = ETW_LAST_ENABLE_INFO()
self.ProviderEnableInfo = TRACE_ENABLE_INFO()
self.EnableInfo = vstruct.VArray([ TRACE_ENABLE_INFO() for i in xrange(8) ])
self.FilterData = v_ptr32()
self._pad0160 = v_bytes(size=4)
class WMI_BUFFER_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BufferSize = v_uint32()
self.SavedOffset = v_uint32()
self.CurrentOffset = v_uint32()
self.ReferenceCount = v_uint32()
self.TimeStamp = LARGE_INTEGER()
self.SequenceNumber = v_uint64()
self.ClockType = v_uint64()
self.ClientContext = ETW_BUFFER_CONTEXT()
self.State = v_uint32()
self.Offset = v_uint32()
self.BufferFlag = v_uint16()
self.BufferType = v_uint16()
self.Padding1 = vstruct.VArray([ v_uint32() for i in xrange(4) ])
class QUAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UseThisFieldToCopy = v_uint64()
class OBJECT_HANDLE_COUNT_DATABASE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CountEntries = v_uint32()
self.HandleCountEntries = vstruct.VArray([ OBJECT_HANDLE_COUNT_ENTRY() for i in xrange(1) ])
class MMWSLE_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Index = v_uint32()
class PROC_PERF_SNAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Time = v_uint64()
self.LastTime = v_uint64()
self.Active = v_uint64()
self.LastActive = v_uint64()
self.FrequencyScaledActive = v_uint64()
self.PerformanceScaledActive = v_uint64()
self.CyclesActive = v_uint64()
self.CyclesAffinitized = v_uint64()
class _unnamed_35941(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsUSHORT = v_uint16()
class HEAP_TUNING_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CommittThresholdShift = v_uint32()
self.MaxPreCommittThreshold = v_uint32()
class _unnamed_30755(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.DataInfoOffset = v_uint16()
class LPCP_PORT_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ConnectionPort = v_ptr32()
self.ConnectedPort = v_ptr32()
self.MsgQueue = LPCP_PORT_QUEUE()
self.Creator = CLIENT_ID()
self.ClientSectionBase = v_ptr32()
self.ServerSectionBase = v_ptr32()
self.PortContext = v_ptr32()
self.ClientThread = v_ptr32()
self.SecurityQos = SECURITY_QUALITY_OF_SERVICE()
self.StaticSecurity = SECURITY_CLIENT_CONTEXT()
self.LpcReplyChainHead = LIST_ENTRY()
self.LpcDataInfoChainHead = LIST_ENTRY()
self.ServerProcess = v_ptr32()
self.MaxMessageLength = v_uint16()
self.MaxConnectionInfoLength = v_uint16()
self.Flags = v_uint32()
self.WaitEvent = KEVENT()
class WHEA_XPF_MCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
self.NumberOfBanks = v_uint8()
self.Flags = XPF_MCE_FLAGS()
self.MCG_Capability = v_uint64()
self.MCG_GlobalControl = v_uint64()
self.Banks = vstruct.VArray([ WHEA_XPF_MC_BANK_DESCRIPTOR() for i in xrange(32) ])
class EVENT_FILTER_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Ptr = v_uint64()
self.Size = v_uint32()
self.Type = v_uint32()
class _unnamed_30750(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataLength = v_uint16()
self.TotalLength = v_uint16()
class CALL_PERFORMANCE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SpinLock = v_uint32()
self.HashTable = vstruct.VArray([ LIST_ENTRY() for i in xrange(64) ])
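# KPRCB: per-processor control block; scheduler state, per-CPU counters and
# lookaside lists for one logical processor.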
class KPRCB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinorVersion = v_uint16()
self.MajorVersion = v_uint16()
self.CurrentThread = v_ptr32()
self.NextThread = v_ptr32()
self.IdleThread = v_ptr32()
self.LegacyNumber = v_uint8()
self.NestingLevel = v_uint8()
self.BuildType = v_uint16()
self.CpuType = v_uint8()
self.CpuID = v_uint8()
self.CpuStep = v_uint16()
self.ProcessorState = KPROCESSOR_STATE()
self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
self.CFlushSize = v_uint32()
self.CoresPerPhysicalProcessor = v_uint8()
self.LogicalProcessorsPerCore = v_uint8()
self.PrcbPad0 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.MHz = v_uint32()
self.CpuVendor = v_uint8()
self.GroupIndex = v_uint8()
self.Group = v_uint16()
self.GroupSetMember = v_uint32()
self.Number = v_uint32()
self.ClockOwner = v_uint8()
self.PendingTick = v_uint8()
self.PrcbPad1 = vstruct.VArray([ v_uint8() for i in xrange(70) ])
self.LockQueue = vstruct.VArray([ KSPIN_LOCK_QUEUE() for i in xrange(17) ])
self.NpxThread = v_ptr32()
self.InterruptCount = v_uint32()
self.KernelTime = v_uint32()
self.UserTime = v_uint32()
self.DpcTime = v_uint32()
self.DpcTimeCount = v_uint32()
self.InterruptTime = v_uint32()
self.AdjustDpcThreshold = v_uint32()
self.PageColor = v_uint32()
self.DebuggerSavedIRQL = v_uint8()
self.NodeColor = v_uint8()
self.PrcbPad20 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
self.NodeShiftedColor = v_uint32()
self.ParentNode = v_ptr32()
self.SecondaryColorMask = v_uint32()
self.DpcTimeLimit = v_uint32()
self.PrcbPad21 = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.CcFastReadNoWait = v_uint32()
self.CcFastReadWait = v_uint32()
self.CcFastReadNotPossible = v_uint32()
self.CcCopyReadNoWait = v_uint32()
self.CcCopyReadWait = v_uint32()
self.CcCopyReadNoWaitMiss = v_uint32()
self.MmSpinLockOrdering = v_uint32()
self.IoReadOperationCount = v_uint32()
self.IoWriteOperationCount = v_uint32()
self.IoOtherOperationCount = v_uint32()
self.IoReadTransferCount = LARGE_INTEGER()
self.IoWriteTransferCount = LARGE_INTEGER()
self.IoOtherTransferCount = LARGE_INTEGER()
self.CcFastMdlReadNoWait = v_uint32()
self.CcFastMdlReadWait = v_uint32()
self.CcFastMdlReadNotPossible = v_uint32()
self.CcMapDataNoWait = v_uint32()
self.CcMapDataWait = v_uint32()
self.CcPinMappedDataCount = v_uint32()
self.CcPinReadNoWait = v_uint32()
self.CcPinReadWait = v_uint32()
self.CcMdlReadNoWait = v_uint32()
self.CcMdlReadWait = v_uint32()
self.CcLazyWriteHotSpots = v_uint32()
self.CcLazyWriteIos = v_uint32()
self.CcLazyWritePages = v_uint32()
self.CcDataFlushes = v_uint32()
self.CcDataPages = v_uint32()
self.CcLostDelayedWrites = v_uint32()
self.CcFastReadResourceMiss = v_uint32()
self.CcCopyReadWaitMiss = v_uint32()
self.CcFastMdlReadResourceMiss = v_uint32()
self.CcMapDataNoWaitMiss = v_uint32()
self.CcMapDataWaitMiss = v_uint32()
self.CcPinReadNoWaitMiss = v_uint32()
self.CcPinReadWaitMiss = v_uint32()
self.CcMdlReadNoWaitMiss = v_uint32()
self.CcMdlReadWaitMiss = v_uint32()
self.CcReadAheadIos = v_uint32()
self.KeAlignmentFixupCount = v_uint32()
self.KeExceptionDispatchCount = v_uint32()
self.KeSystemCalls = v_uint32()
self.AvailableTime = v_uint32()
self.PrcbPad22 = vstruct.VArray([ v_uint32() for i in xrange(2) ])
self.PPLookasideList = vstruct.VArray([ PP_LOOKASIDE_LIST() for i in xrange(16) ])
self.PPNxPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) ])
self.PPNPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) ])
self.PPPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) ])
self.PacketBarrier = v_uint32()
self.ReverseStall = v_uint32()
self.IpiFrame = v_ptr32()
self.PrcbPad3 = vstruct.VArray([ v_uint8() for i in xrange(52) ])
self.CurrentPacket = vstruct.VArray([ v_ptr32() for i in xrange(3) ])
self.TargetSet = v_uint32()
self.WorkerRoutine = v_ptr32()
self.IpiFrozen = v_uint32()
self.PrcbPad4 = vstruct.VArray([ v_uint8() for i in xrange(40) ])
self.RequestSummary = v_uint32()
self.SignalDone = v_ptr32()
self.PrcbPad50 = vstruct.VArray([ v_uint8() for i in xrange(48) ])
self.InterruptLastCount = v_uint32()
self.InterruptRate = v_uint32()
self.DpcData = vstruct.VArray([ KDPC_DATA() for i in xrange(2) ])
self.DpcStack = v_ptr32()
self.MaximumDpcQueueDepth = v_uint32()
self.DpcRequestRate = v_uint32()
self.MinimumDpcRate = v_uint32()
self.DpcLastCount = v_uint32()
self.PrcbLock = v_uint32()
self.DpcGate = KGATE()
self.ThreadDpcEnable = v_uint8()
self.QuantumEnd = v_uint8()
self.DpcRoutineActive = v_uint8()
self.IdleSchedule = v_uint8()
self.DpcRequestSummary = v_uint32()
self.LastTimerHand = v_uint32()
self.LastTick = v_uint32()
self.PeriodicCount = v_uint32()
self.PeriodicBias = v_uint32()
self.ClockInterrupts = v_uint32()
self.ReadyScanTick = v_uint32()
self.BalanceState = v_uint8()
self.GroupSchedulingOverQuota = v_uint8()
self.PrcbPad41 = vstruct.VArray([ v_uint8() for i in xrange(10) ])
self._pad2260 = v_bytes(size=4)
self.TimerTable = KTIMER_TABLE()
self.CallDpc = KDPC()
self.ClockKeepAlive = v_uint32()
self.PrcbPad6 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
self.DpcWatchdogPeriod = v_uint32()
self.DpcWatchdogCount = v_uint32()
self.KeSpinLockOrdering = v_uint32()
self.PrcbPad70 = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.QueueIndex = v_uint32()
self.DeferredReadyListHead = SINGLE_LIST_ENTRY()
self.WaitListHead = LIST_ENTRY()
self.WaitLock = v_uint32()
self.ReadySummary = v_uint32()
self.ReadyQueueWeight = v_uint32()
self.BuddyPrcb = v_ptr32()
self.StartCycles = v_uint64()
self.GenerationTarget = v_uint64()
self.CycleTime = v_uint64()
self.HighCycleTime = v_uint32()
self.ScbOffset = v_uint32()
self.AffinitizedCycles = v_uint64()
self.DispatcherReadyListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(32) ])
self.ChainedInterruptList = v_ptr32()
self.LookasideIrpFloat = v_uint32()
self.ScbQueue = RTL_RB_TREE()
self.ScbList = LIST_ENTRY()
self.MmPageFaultCount = v_uint32()
self.MmCopyOnWriteCount = v_uint32()
self.MmTransitionCount = v_uint32()
self.MmCacheTransitionCount = v_uint32()
self.MmDemandZeroCount = v_uint32()
self.MmPageReadCount = v_uint32()
self.MmPageReadIoCount = v_uint32()
self.MmCacheReadCount = v_uint32()
self.MmCacheIoCount = v_uint32()
self.MmDirtyPagesWriteCount = v_uint32()
self.MmDirtyWriteIoCount = v_uint32()
self.MmMappedPagesWriteCount = v_uint32()
self.MmMappedWriteIoCount = v_uint32()
self.CachedCommit = v_uint32()
self.CachedResidentAvailable = v_uint32()
self.HyperPte = v_ptr32()
self.PrcbPad8 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
self.VendorString = vstruct.VArray([ v_uint8() for i in xrange(13) ])
self.InitialApicId = v_uint8()
self.LogicalProcessorsPerPhysicalProcessor = v_uint8()
self.PrcbPad9 = vstruct.VArray([ v_uint8() for i in xrange(5) ])
self.FeatureBits = v_uint32()
self._pad3c98 = v_bytes(size=4)
self.UpdateSignature = LARGE_INTEGER()
self.IsrTime = v_uint64()
self.Stride = v_uint32()
self.PrcbPad90 = v_uint32()
self.PowerState = PROCESSOR_POWER_STATE()
self.PrcbPad91 = vstruct.VArray([ v_uint32() for i in xrange(1) ])
self.DpcWatchdogDpc = KDPC()
self._pad3e58 = v_bytes(size=4)
self.DpcWatchdogTimer = KTIMER()
self.HypercallPageList = SLIST_HEADER()
self.HypercallPageVirtual = v_ptr32()
self.VirtualApicAssist = v_ptr32()
self.StatisticsPage = v_ptr32()
self.Cache = vstruct.VArray([ CACHE_DESCRIPTOR() for i in xrange(5) ])
self.CacheCount = v_uint32()
self.PackageProcessorSet = KAFFINITY_EX()
self.CacheProcessorMask = vstruct.VArray([ v_uint32() for i in xrange(5) ])
self.ScanSiblingMask = v_uint32()
self.CoreProcessorSet = v_uint32()
self.ScanSiblingIndex = v_uint32()
self.LLCLevel = v_uint32()
self.WheaInfo = v_ptr32()
self.EtwSupport = v_ptr32()
self._pad3f10 = v_bytes(size=4)
self.InterruptObjectPool = SLIST_HEADER()
self.PrcbPad92 = vstruct.VArray([ v_uint32() for i in xrange(8) ])
self.ProcessorProfileControlArea = v_ptr32()
self.ProfileEventIndexAddress = v_ptr32()
self.TimerExpirationDpc = KDPC()
self.SynchCounters = SYNCH_COUNTERS()
self.FsCounters = FILESYSTEM_DISK_COUNTERS()
self.Context = v_ptr32()
self.ContextFlagsInit = v_uint32()
self.ExtendedState = v_ptr32()
self.EntropyTimingState = KENTROPY_TIMING_STATE()
self._pad4160 = v_bytes(size=4)
class EXCEPTION_POINTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionRecord = v_ptr32()
self.ContextRecord = v_ptr32()
class PPM_FFH_THROTTLE_STATE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnableLogging = v_uint8()
self._pad0004 = v_bytes(size=3)
self.MismatchCount = v_uint32()
self.Initialized = v_uint8()
self._pad0010 = v_bytes(size=7)
self.LastValue = v_uint64()
self.LastLogTickCount = LARGE_INTEGER()
class WHEA_XPF_NMI_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Enabled = v_uint8()
class PCW_REGISTRATION_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.Name = v_ptr32()
self.CounterCount = v_uint32()
self.Counters = v_ptr32()
self.Callback = v_ptr32()
self.CallbackContext = v_ptr32()
class IO_REMOVE_LOCK_COMMON_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Removed = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.IoCount = v_uint32()
self.RemoveEvent = KEVENT()
class IO_CACHE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CacheAttribute = v_uint8()
class POP_TRIGGER_WAIT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Event = KEVENT()
self.Status = v_uint32()
self.Link = LIST_ENTRY()
self.Trigger = v_ptr32()
class KAFFINITY_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint16()
self.Size = v_uint16()
self.Reserved = v_uint32()
self.Bitmap = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class ETW_WMITRACE_WORK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LoggerId = v_uint32()
self.SpareUlong = v_uint32()
self.LoggerName = vstruct.VArray([ v_uint8() for i in xrange(65) ])
self.FileName = vstruct.VArray([ v_uint8() for i in xrange(129) ])
self._pad00cc = v_bytes(size=2)
self.MaximumFileSize = v_uint32()
self.MinBuffers = v_uint32()
self.MaxBuffers = v_uint32()
self.BufferSize = v_uint32()
self.Mode = v_uint32()
self.FlushTimer = v_uint32()
self._pad00e8 = v_bytes(size=4)
self.Status = v_uint32()
self._pad00f0 = v_bytes(size=4)
class PROVIDER_BINARY_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.ConsumersNotified = v_uint8()
self.Spare = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.DebugIdSize = v_uint32()
self.DebugId = CVDD()
class MMVIEW_CONTROL_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlArea = v_ptr32()
class _unnamed_30885(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Internal = v_uint32()
class KSEMAPHORE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.Limit = v_uint32()
class _unnamed_28974(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.e2 = _unnamed_29028()
class _unnamed_28971(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags = v_uint32()
class KALPC_HANDLE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.ObjectType = v_uint32()
self.DuplicateContext = v_ptr32()
class _unnamed_35666(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFlags2 = v_uint32()
class CM_CACHED_VALUE_INDEX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CellIndex = v_uint32()
self.Data = _unnamed_34744()
class _unnamed_34363(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = _unnamed_35986()
class _unnamed_30806(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ReferenceCache = v_uint8()
class LOG_HANDLE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LogHandle = v_ptr32()
self.FlushToLsnRoutine = v_ptr32()
self.QueryLogHandleInfoRoutine = v_ptr32()
self.DirtyPageStatistics = DIRTY_PAGE_STATISTICS()
self.DirtyPageThresholds = DIRTY_PAGE_THRESHOLDS()
self.AdditionalPagesToWrite = v_uint32()
self.CcLWScanDPThreshold = v_uint32()
self.LargestLsnForCurrentLWScan = LARGE_INTEGER()
self.RelatedFileObject = v_ptr32()
self.LargestLsnFileObjectKey = v_uint32()
self.LastLWTimeStamp = LARGE_INTEGER()
self.Flags = v_uint32()
self._pad0050 = v_bytes(size=4)
class _unnamed_35669(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SequentialVa = MI_VAD_SEQUENTIAL_INFO()
class _unnamed_34699(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length64 = v_uint32()
class PCW_COUNTER_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CounterMask = v_uint64()
self.InstanceMask = v_ptr32()
self._pad0010 = v_bytes(size=4)
class _unnamed_34693(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length40 = v_uint32()
class MI_SECTION_IMAGE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExportedImageInformation = SECTION_IMAGE_INFORMATION()
self.InternalImageInformation = MI_EXTRA_IMAGE_INFORMATION()
class _unnamed_34696(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length48 = v_uint32()
class _unnamed_34400(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BlockedDriverGuid = GUID()
class DBGKD_WRITE_BREAKPOINT32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakPointAddress = v_uint32()
self.BreakPointHandle = v_uint32()
class DBGKD_BREAKPOINTEX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BreakPointCount = v_uint32()
self.ContinueStatus = v_uint32()
class _unnamed_28035(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
class IMAGE_NT_HEADERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.FileHeader = IMAGE_FILE_HEADER()
self.OptionalHeader = IMAGE_OPTIONAL_HEADER()
class _unnamed_34402(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ParentId = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class ETW_REPLY_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Queue = KQUEUE()
self.EventsLost = v_uint32()
class OBJECT_TYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TypeList = LIST_ENTRY()
self.Name = UNICODE_STRING()
self.DefaultObject = v_ptr32()
self.Index = v_uint8()
self._pad0018 = v_bytes(size=3)
self.TotalNumberOfObjects = v_uint32()
self.TotalNumberOfHandles = v_uint32()
self.HighWaterNumberOfObjects = v_uint32()
self.HighWaterNumberOfHandles = v_uint32()
self.TypeInfo = OBJECT_TYPE_INITIALIZER()
self.TypeLock = EX_PUSH_LOCK()
self.Key = v_uint32()
self.CallbackList = LIST_ENTRY()
class ALPC_MESSAGE_ZONE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mdl = v_ptr32()
self.UserVa = v_ptr32()
self.UserLimit = v_ptr32()
self.SystemVa = v_ptr32()
self.SystemLimit = v_ptr32()
self.Size = v_uint32()
class KNODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeepIdleSet = v_uint32()
self._pad0040 = v_bytes(size=60)
self.ProximityId = v_uint32()
self.NodeNumber = v_uint16()
self.PrimaryNodeNumber = v_uint16()
self.MaximumProcessors = v_uint8()
self.Flags = flags()
self.Stride = v_uint8()
self.NodePad0 = v_uint8()
self.Affinity = GROUP_AFFINITY()
self.IdleCpuSet = v_uint32()
self.IdleSmtSet = v_uint32()
self._pad0080 = v_bytes(size=32)
self.Seed = v_uint32()
self.Lowest = v_uint32()
self.Highest = v_uint32()
self.ParkLock = v_uint32()
self.NonParkedSet = v_uint32()
self._pad00c0 = v_bytes(size=44)
class PRIVILEGE_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegeCount = v_uint32()
self.Control = v_uint32()
self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(1) ])
class ALPC_HANDLE_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Handles = v_ptr32()
self.TotalHandles = v_uint32()
self.Flags = v_uint32()
self.Lock = EX_PUSH_LOCK()
class _unnamed_27736(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Overlay = _unnamed_27809()
self._pad0030 = v_bytes(size=4)
class CM_KEY_HASH_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.Owner = v_ptr32()
self.Entry = v_ptr32()
class HMAP_DIRECTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Directory = vstruct.VArray([ v_ptr32() for i in xrange(1024) ])
class IO_WORKITEM(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkItem = WORK_QUEUE_ITEM()
self.Routine = v_ptr32()
self.IoObject = v_ptr32()
self.Context = v_ptr32()
self.Type = v_uint32()
self.ActivityId = GUID()
class RTL_DYNAMIC_HASH_TABLE_ENUMERATOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
self.ChainHead = v_ptr32()
self.BucketIndex = v_uint32()
class SYSTEM_POWER_CAPABILITIES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerButtonPresent = v_uint8()
self.SleepButtonPresent = v_uint8()
self.LidPresent = v_uint8()
self.SystemS1 = v_uint8()
self.SystemS2 = v_uint8()
self.SystemS3 = v_uint8()
self.SystemS4 = v_uint8()
self.SystemS5 = v_uint8()
self.HiberFilePresent = v_uint8()
self.FullWake = v_uint8()
self.VideoDimPresent = v_uint8()
self.ApmPresent = v_uint8()
self.UpsPresent = v_uint8()
self.ThermalControl = v_uint8()
self.ProcessorThrottle = v_uint8()
self.ProcessorMinThrottle = v_uint8()
self.ProcessorMaxThrottle = v_uint8()
self.FastSystemS4 = v_uint8()
self.Hiberboot = v_uint8()
self.WakeAlarmPresent = v_uint8()
self.AoAc = v_uint8()
self.DiskSpinDown = v_uint8()
self.spare3 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
self.SystemBatteriesPresent = v_uint8()
self.BatteriesAreShortTerm = v_uint8()
self.BatteryScale = vstruct.VArray([ BATTERY_REPORTING_SCALE() for i in xrange(3) ])
self.AcOnLineWake = v_uint32()
self.SoftLidWake = v_uint32()
self.RtcWake = v_uint32()
self.MinDeviceWakeState = v_uint32()
self.DefaultLowLatencyWake = v_uint32()
class THERMAL_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ThermalStamp = v_uint32()
self.ThermalConstant1 = v_uint32()
self.ThermalConstant2 = v_uint32()
self.Processors = v_uint32()
self.SamplingPeriod = v_uint32()
self.CurrentTemperature = v_uint32()
self.PassiveTripPoint = v_uint32()
self.CriticalTripPoint = v_uint32()
self.ActiveTripPointCount = v_uint8()
self._pad0024 = v_bytes(size=3)
self.ActiveTripPoint = vstruct.VArray([ v_uint32() for i in xrange(10) ])
class MMEXTEND_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CommittedSize = v_uint64()
self.ReferenceCount = v_uint32()
self._pad0010 = v_bytes(size=4)
class VF_TARGET_ALL_SHARED_EXPORT_THUNKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SharedExportThunks = v_ptr32()
self.PoolSharedExportThunks = v_ptr32()
self.OrderDependentSharedExportThunks = v_ptr32()
self.XdvSharedExportThunks = v_ptr32()
class RTL_USER_PROCESS_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MaximumLength = v_uint32()
self.Length = v_uint32()
self.Flags = v_uint32()
self.DebugFlags = v_uint32()
self.ConsoleHandle = v_ptr32()
self.ConsoleFlags = v_uint32()
self.StandardInput = v_ptr32()
self.StandardOutput = v_ptr32()
self.StandardError = v_ptr32()
self.CurrentDirectory = CURDIR()
self.DllPath = UNICODE_STRING()
self.ImagePathName = UNICODE_STRING()
self.CommandLine = UNICODE_STRING()
self.Environment = v_ptr32()
self.StartingX = v_uint32()
self.StartingY = v_uint32()
self.CountX = v_uint32()
self.CountY = v_uint32()
self.CountCharsX = v_uint32()
self.CountCharsY = v_uint32()
self.FillAttribute = v_uint32()
self.WindowFlags = v_uint32()
self.ShowWindowFlags = v_uint32()
self.WindowTitle = UNICODE_STRING()
self.DesktopInfo = UNICODE_STRING()
self.ShellInfo = UNICODE_STRING()
self.RuntimeData = UNICODE_STRING()
self.CurrentDirectores = vstruct.VArray([ RTL_DRIVE_LETTER_CURDIR() for i in xrange(32) ])
self.EnvironmentSize = v_uint32()
self.EnvironmentVersion = v_uint32()
self.PackageDependencyData = v_ptr32()
self.ProcessGroupId = v_uint32()
class _unnamed_34963(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.s1 = _unnamed_36621()
class ACTIVATION_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class FILESYSTEM_DISK_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FsBytesRead = v_uint64()
self.FsBytesWritten = v_uint64()
class MM_DRIVER_VERIFIER_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint32()
self.RaiseIrqls = v_uint32()
self.AcquireSpinLocks = v_uint32()
self.SynchronizeExecutions = v_uint32()
self.AllocationsAttempted = v_uint32()
self.AllocationsSucceeded = v_uint32()
self.AllocationsSucceededSpecialPool = v_uint32()
self.AllocationsWithNoTag = v_uint32()
self.TrimRequests = v_uint32()
self.Trims = v_uint32()
self.AllocationsFailed = v_uint32()
self.AllocationsFailedDeliberately = v_uint32()
self.Loads = v_uint32()
self.Unloads = v_uint32()
self.UnTrackedPool = v_uint32()
self.UserTrims = v_uint32()
self.CurrentPagedPoolAllocations = v_uint32()
self.CurrentNonPagedPoolAllocations = v_uint32()
self.PeakPagedPoolAllocations = v_uint32()
self.PeakNonPagedPoolAllocations = v_uint32()
self.PagedBytes = v_uint32()
self.NonPagedBytes = v_uint32()
self.PeakPagedBytes = v_uint32()
self.PeakNonPagedBytes = v_uint32()
self.BurstAllocationsFailedDeliberately = v_uint32()
self.SessionTrims = v_uint32()
self.OptionChanges = v_uint32()
self.VerifyMode = v_uint32()
self.PreviousBucketName = UNICODE_STRING()
self.ActivityCounter = v_uint32()
self.PreviousActivityCounter = v_uint32()
self.WorkerTrimRequests = v_uint32()
class _unnamed_37043(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mbr = _unnamed_37222()
self._pad0010 = v_bytes(size=8)
class SEP_LOWBOX_NUMBER_MAPPING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.Bitmap = RTL_BITMAP()
self.HashTable = v_ptr32()
self.Active = v_uint8()
self._pad0014 = v_bytes(size=3)
class IOP_IRP_EXTENSION_STATUS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.ActivityId = v_uint32()
self.IoTracking = v_uint32()
class ALPC_COMPLETION_LIST_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StartMagic = v_uint64()
self.TotalSize = v_uint32()
self.ListOffset = v_uint32()
self.ListSize = v_uint32()
self.BitmapOffset = v_uint32()
self.BitmapSize = v_uint32()
self.DataOffset = v_uint32()
self.DataSize = v_uint32()
self.AttributeFlags = v_uint32()
self.AttributeSize = v_uint32()
self._pad0040 = v_bytes(size=20)
self.State = ALPC_COMPLETION_LIST_STATE()
self.LastMessageId = v_uint32()
self.LastCallbackId = v_uint32()
self._pad0080 = v_bytes(size=48)
self.PostCount = v_uint32()
self._pad00c0 = v_bytes(size=60)
self.ReturnCount = v_uint32()
self._pad0100 = v_bytes(size=60)
self.LogSequenceNumber = v_uint32()
self._pad0140 = v_bytes(size=60)
self.UserLock = RTL_SRWLOCK()
self._pad0148 = v_bytes(size=4)
self.EndMagic = v_uint64()
self._pad0180 = v_bytes(size=48)
class ETW_QUEUE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.DataBlock = v_ptr32()
self.RegEntry = v_ptr32()
self.ReplyObject = v_ptr32()
self.WakeReference = v_ptr32()
self.RegIndex = v_uint16()
self.ReplyIndex = v_uint16()
self.Flags = v_uint32()
class _unnamed_27954(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.EaList = v_ptr32()
self.EaListLength = v_uint32()
self.EaIndex = v_uint32()
class u(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.KeyNode = CM_KEY_NODE()
class IO_RESOURCE_REQUIREMENTS_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListSize = v_uint32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.SlotNumber = v_uint32()
self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
self.AlternativeLists = v_uint32()
self.List = vstruct.VArray([ IO_RESOURCE_LIST() for i in xrange(1) ])
class VF_WATCHDOG_IRP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.Irp = v_ptr32()
self.DueTickCount = v_uint32()
self.Inserted = v_uint8()
self.TrackedStackLocation = v_uint8()
self.CancelTimeoutTicks = v_uint16()
class MMWSLE_NONDIRECT_HASH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Key = v_ptr32()
self.Index = v_uint32()
class _unnamed_27959(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
| apache-2.0 | -5,268,081,703,299,880,000 | 32.494119 | 110 | 0.635192 | false | 3.210507 | false | false | false |
paulscottrobson/fred | uForth/ufc.py | 1 | 7340 | import re,sys
# ********************************************************************************************************************
# Exception Handler
# ********************************************************************************************************************
class ForthException(Exception):
def __init__(self,msg):
print(msg)
sys.exit(1)
# ********************************************************************************************************************
# Micro Forth Core Class
# ********************************************************************************************************************
class uForthCore:
def __init__(self):
self.core = [ord(x) for x in open("uforth.core").read(-1)] # read in core file
self.dictionary = {} # extract dictionary.
pos = 2048
while self.core[pos] != 0xFF: # keep going till done.
addr = self.core[pos] * 256 + self.core[pos+1] # word address.
word = "" # extract ASCIIZ name
pos += 2
while self.core[pos] != 0:
word = word + chr(self.core[pos])
pos += 1
pos += 1
self.dictionary[word] = addr # store it.
self.vocabulary = self.dictionary.keys() # sorted vocab list
self.vocabulary.sort()
def getCore(self,address):
return self.core[address]
def getCoreSize(self):
return self.dictionary["$$topkernel"] # where code starts.
def getVocabulary(self):
return self.vocabulary
def getWordAddress(self,word):
return self.dictionary[word.lower()]
# ********************************************************************************************************************
# Word source
# ********************************************************************************************************************
class WordStream:
def __init__(self,fileList = None):
if fileList is None: # load in from forth.make
fileList = [x.strip() for x in open("uforth.make").readlines() if x.strip() != ""]
self.words = []
for f in fileList: # for each file
src = open(f).readlines() # read in the source
src = [x if x.find("//") < 0 else x[:x.find("//")] for x in src] # remove comments
src = " ".join(src).replace("\t"," ").replace("\n"," ").lower() # one long string, no tab/return
for w in src.split(): # split into words
if w != "": # append non null
self.words.append(w)
self.pointer = 0 # index into word stream
def endOfStream(self): # check end of stream
return self.pointer >= len(self.words)
def get(self): # get next word, "" if none.
w = "" if self.endOfStream() else self.words[self.pointer]
self.pointer += 1
return w
# ********************************************************************************************************************
# Compiler
# ********************************************************************************************************************
class Compiler:
def __init__(self,wordStream):
self.core = uForthCore() # get the core object
self.wordStream = wordStream # save reference to word stream.
self.dictionary = {} # copy it
for word in self.core.getVocabulary():
self.dictionary[word] = self.core.getWordAddress(word)
self.code = [] # copy the core
for i in range(0,self.core.getCoreSize()):
self.code.append(self.core.getCore(i))
self.pointer = self.core.getCoreSize() # next free address
self.currentDefinition = None # current definition (for self)
self.nextVariable = 0 # next variable to be allocated
self.pendingThen = None # no pending then
self.isListing = True
print("Loaded {0} bytes uForth core.".format(self.pointer))
while not self.wordStream.endOfStream():
self.compile(self.wordStream.get())
dStack = (0xFF + self.nextVariable) / 2 # split dstack / rstack
print(self.nextVariable+0xFF)/2
self.code[self.core.getWordAddress("$$startmarker")+2] = dStack
open("a.out","wb").write("".join([chr(x) for x in self.code]))
def define(self,name,address,show = True):
assert name != "","No name provided." # check valid name
self.dictionary[name] = address # remember pointer
if self.isListing and show:
print("{0:04x} ==== :{1} ====".format(address,name))
if name == "__main": # if __main tell uForth
startPtr = self.core.getWordAddress("$$startmarker")
self.code[startPtr] = address / 256
self.code[startPtr+1] = address & 255
def compile(self,word):
if word == ':': # word definition ?
name = self.wordStream.get()
self.define(name,self.pointer)
if name != "__main":
self.compileByte(0xDD,"(sep rd)")
self.currentDefinition = self.pointer
elif word == "variable": # variable definition ?
self.define(self.wordStream.get(),0x1000+self.nextVariable)
self.nextVariable += 1
elif word == "alloc": # allocate memory ?
self.nextVariable += int(self.wordStream.get())
elif word == "if": # if ?
self.pendingThen = self.pointer
self.compileWord("0br")
self.compileByte(0,"(placeholder)")
elif word == "then": # then ?
self.closeThen()
elif word == "self": # tail recursion ?
self.compileWord("br")
n = self.currentDefinition - (self.pointer+1)
self.compileByte(n,"("+str(n)+")")
elif re.match("^\\-?\\d+$",word): # decimal constant ?
self.compileConstant(int(word))
elif re.match("^\\$[0-9a-f]+$",word): # hexadecimal constant ?
self.compileConstant(int(word[1:],16))
elif re.match("^\\[[0-9a-f]+\\]$",word): # byte data ?
word = word[1:-1]
for i in range(0,len(word),2):
n = int(word[i:i+2],16)
self.compileByte(n,"data "+str(n))
else: # is it a dictionary word ?
if word not in self.dictionary: # check the dictionary.
raise ForthException("Don't understand "+word)
if (self.dictionary[word] & 0x1000) != 0:
self.compileConstant(self.dictionary[word] & 0xFF)
else:
self.compileWord(word)
if word == ";": # ; close any pending thens.
self.closeThen()
def closeThen(self):
if self.pendingThen is not None:
self.code[self.pendingThen+1] = self.pointer - (self.pendingThen+2)
if self.isListing:
print("{0:04x} {1:02x} branch patch".format(self.pendingThen+1,self.code[self.pendingThen+1]))
self.pendingThen = None
def compileConstant(self,constant):
if str(constant) in self.dictionary:
self.compileWord(str(constant))
else:
self.compileWord("literal")
self.compileByte(constant,"("+str(constant)+")")
def compileWord(self,word):
assert word in self.dictionary,"Word "+word+" unknown."
addr = self.dictionary[word]
if addr < 0xF8:
self.compileByte(addr,word)
else:
self.compileByte((addr >> 8) | 0xF8,word)
self.compileByte(addr & 0xFF,"")
def compileByte(self,byte,text):
byte &= 0xFF
if self.isListing:
print("{0:04x} {1:02x} {2}".format(self.pointer,byte,text))
self.code.append(byte)
self.pointer += 1
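# Encoding sketch with assumed example addresses (not taken from a real core
# file): compileWord emits a single byte for dictionary addresses below 0xF8,
# and two bytes otherwise with the top bits of the first byte set:
#
#   address 0x42   -> 0x42
#   address 0x1234 -> 0xFA 0x34   ((0x1234 >> 8) | 0xF8, then 0x1234 & 0xFF)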
c = Compiler(WordStream())
| mit | 3,355,735,194,558,943,700 | 40.235955 | 118 | 0.524523 | false | 3.396576 | false | false | false |
sug4rok/Servus | Servus/base/settings.py | 1 | 8586 | # coding=utf-8
import os
from sys import platform
from locale import setlocale, LC_ALL
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ky-kr37p8k^qdos0dk(ijv9m%*8(zre2+s@yct%+w(2(z1$2h2'
DEBUG = False
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django_cron',
'base',
'slideshow',
'plugins',
)
# Containers - applications for plug-ins of the Servus
CONTAINER_APPS = (
'system', # System application. Don't delete it!
'home', # System application. Don't delete it!
'events', # System application. Don't delete it!
'climate',
'weather',
)
PLUGINS = (
# 'plugins.user_sms_ru', # Sending sms through the website sms.ru
'plugins.arduino', # Arduino controller
# 'plugins.arduino_bh1750', # for connecting a BH1750 sensors (ambient light measurement) to the Arduino
# 'plugins.arduino_bmp085', # for connecting a BMP085/BMP180 sensor to the Arduino
# 'plugins.arduino_dht', # for connecting a DHT sensor (DHT11, DHT22) to the Arduino
# 'plugins.arduino_ds18d20', # for connecting a DS18D20 sensor to the Arduino
'plugins.arduino_on_off_switch', # on/off switch states
# 'plugins.arduino_reed_switch', # reed switch sensors
# 'plugins.arduino_yl83', # for connecting a YL-83 raindrop sensors
# 'plugins.weather_openweathermap', # weather forecast from openweathermap.org
# 'plugins.weather_weather_ua', # weather from weather.ua
# 'plugins.system_hddtemp', # temperature HDD in linux (need to install hddtemp)
'plugins.system_ip_online', # ping utility
'plugins.system_mac_online', # search for device mac address in the home network
)
INSTALLED_APPS += (PLUGINS + CONTAINER_APPS)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'base.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['/plugins/{0}/templates/'.format(p.split('.')[1]) for p in PLUGINS],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
],
'debug': False,
},
},
]
# Media files
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
WSGI_APPLICATION = 'base.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
# 'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'NAME': 'servusdb',
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
CONN_MAX_AGE = 60
if 'win' in platform:
OS = 'windows'
LOCALE = 'Russian'
elif 'linux' in platform:
OS = 'linux'
LOCALE = 'ru_RU.utf8'
else:
OS = 'unknown'
LOCALE = ''
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru-RU'
setlocale(LC_ALL, LOCALE)
USE_I18N = True
USE_L10N = True
USE_TZ = False
# =================== #
# Servus settings #
# =================== #
SITE_NAME = 'Servus'
# Bootstrap theme (dark or light)
THEME = 'dark'
ALERTS = {0: 'default', 1: 'success', 2: 'info', 3: 'warning', 4: 'danger'}
# Gmail mail account settings used for sending e-mail
# To run a local mail-server emulator: python -m smtpd -n -c DebuggingServer localhost:587
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'localhost' # 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'user@gmail.com'
# EMAIL_HOST_PASSWORD = 'password'
# Cookies settings
SESSION_COOKIE_NAME = 'Servus_sessionid'
SESSION_COOKIE_AGE = 99999999
# Tasks for django-cron
CRON_CLASSES = [
'django_cron.cron.FailedRunsNotificationCronJob',
'base.cron.DelOutdatedEvents',
'slideshow.cron.SlideshowJob',
# 'events.cron.EmailsSendJob',
# 'events.cron.SMSSendJob',
# 'system.cron.PerfomArduinoCommands',
# 'home.cron.GetOnOffSwitchState',
# 'climate.cron.GetAmbientLightData',
# 'climate.cron.GetPressureData',
# 'climate.cron.GetRaindropData',
# 'climate.cron.GetTempHumidData',
# 'weather.cron.GetWeatherJob',
# 'plugins.system_hddtemp.cron.GetHDDTemp',
# 'plugins.system_ip_online.cron.GetIPOnline',
# 'plugins.system_mac_online.cron.GetMACOnline',
]
DJANGO_CRON_DELETE_LOGS_OLDER_THAN = 32
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'main_formatter': {
'format': '%(asctime)s [%(levelname)s] %(name)s:\n'
'Message: %(message)s\n'
'Path: %(pathname)s:%(lineno)d in function: %(funcName)s\n',
'datefmt': '%Y-%m-%d %H:%M:%S',
},
},
'handlers': {
'production_file': {
'level': 'WARNING',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(BASE_DIR, 'logs/product.log'),
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 7,
'formatter': 'main_formatter',
'filters': ['require_debug_false'],
},
'debug_file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
'maxBytes': 1024 * 1024 * 5, # 5 MB
'backupCount': 7,
'formatter': 'main_formatter',
'filters': ['require_debug_true'],
},
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'null': {
'class': 'logging.NullHandler',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['null', ],
},
'django.request': {
'handlers': ['mail_admins', 'console'],
'level': 'ERROR',
'propagate': True,
},
'django.security': {
'handlers': ['mail_admins', 'console'],
'level': 'ERROR',
'propagate': True,
},
'py.warnings': {
'handlers': ['null', ],
},
'': {
'handlers': ['console', 'production_file', 'debug_file'],
'level': 'DEBUG',
},
}
}
| mit | -8,634,184,277,882,173,000 | 31.507874 | 109 | 0.588415 | false | 3.357396 | false | false | false |
isard-vdi/isard | webapp/webapp/webapp/admin/views/AdminHypersViews.py | 1 | 2254 | # Copyright 2017 the Isard-vdi project authors:
# Josep Maria Viñolas Auquer
# Alberto Larraz Dalmases
# License: AGPLv3
#!flask/bin/python
# coding=utf-8
import json
import time
from flask import render_template, Response, request, redirect, url_for, flash
from flask_login import login_required
from webapp import app
from ...lib import admin_api
app.adminapi = admin_api.isardAdmin()
import rethinkdb as r
from ...lib.flask_rethink import RethinkDB
db = RethinkDB(app)
db.init_app(app)
from .decorators import isAdmin
'''
HYPERVISORS
'''
@app.route('/isard-admin/admin/hypervisors', methods=['GET'])
@login_required
@isAdmin
def admin_hypervisors():
# ~ hypers=app.adminapi.hypervisors_get()
return render_template('admin/pages/hypervisors.html', title="Hypervisors", header="Hypervisors", nav="Hypervisors")
@app.route('/isard-admin/admin/hypervisors/json')
@app.route('/isard-admin/admin/hypervisors/json/<id>')
@login_required
@isAdmin
def admin_hypervisors_json(id=None):
domain = app.adminapi.hypervisors_get(id)
return json.dumps(domain), 200, {'Content-Type':'application/json'}
@app.route('/isard-admin/admin/hypervisors_pools', methods=['GET','POST'])
@login_required
@isAdmin
def hypervisors_pools_get():
res=True
if request.method == 'POST':
ca=request.form['viewer-certificate']
pre_dict=request.form
pre_dict.pop('viewer-certificate', None)
create_dict=app.isardapi.f.unflatten_dict(request.form)
create_dict['viewer']['certificate']=ca
#check and parse name not done!
create_dict['id']=create_dict['name']
create_dict['interfaces']=[create_dict['interfaces']]
if res == True:
flash('Hypervisor pool '+create_dict['id']+' added to the system.','success')
return render_template('admin/pages/hypervisors.html', title="Hypervisors", header="Hypervisors", nav="Hypervisors")
else:
flash('Could not create hypervisor pool. Maybe you have one with the same name?','danger')
return render_template('pages/hypervisors.html', nav="Hypervisors")
return json.dumps(app.adminapi.hypervisors_pools_get(flat=False)), 200, {'Content-Type': 'application/json'}
| agpl-3.0 | -2,627,908,517,283,964,400 | 33.136364 | 128 | 0.695073 | false | 3.45023 | false | false | false |
bgris/ODL_bgris | lib/python3.5/site-packages/zmq/devices/proxydevice.py | 43 | 2499 | """Proxy classes and functions."""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import zmq
from zmq.devices.basedevice import Device, ThreadDevice, ProcessDevice
class ProxyBase(object):
"""Base class for overriding methods."""
def __init__(self, in_type, out_type, mon_type=zmq.PUB):
Device.__init__(self, in_type=in_type, out_type=out_type)
self.mon_type = mon_type
self._mon_binds = []
self._mon_connects = []
self._mon_sockopts = []
def bind_mon(self, addr):
"""Enqueue ZMQ address for binding on mon_socket.
See zmq.Socket.bind for details.
"""
self._mon_binds.append(addr)
def connect_mon(self, addr):
"""Enqueue ZMQ address for connecting on mon_socket.
See zmq.Socket.bind for details.
"""
self._mon_connects.append(addr)
def setsockopt_mon(self, opt, value):
"""Enqueue setsockopt(opt, value) for mon_socket
See zmq.Socket.setsockopt for details.
"""
self._mon_sockopts.append((opt, value))
def _setup_sockets(self):
ins,outs = Device._setup_sockets(self)
ctx = self._context
mons = ctx.socket(self.mon_type)
# set sockopts (must be done first, in case of zmq.IDENTITY)
for opt,value in self._mon_sockopts:
mons.setsockopt(opt, value)
for iface in self._mon_binds:
mons.bind(iface)
for iface in self._mon_connects:
mons.connect(iface)
return ins,outs,mons
def run_device(self):
ins,outs,mons = self._setup_sockets()
zmq.proxy(ins, outs, mons)
class Proxy(ProxyBase, Device):
"""Threadsafe Proxy object.
See zmq.devices.Device for most of the spec. This subclass adds a
<method>_mon version of each <method>_{in|out} method, for configuring the
monitor socket.
A Proxy is a 3-socket ZMQ Device that functions just like a
QUEUE, except each message is also sent out on the monitor socket.
A PUB socket is the most logical choice for the mon_socket, but it is not required.
"""
pass
class ThreadProxy(ProxyBase, ThreadDevice):
"""Proxy in a Thread. See Proxy for more."""
pass
class ProcessProxy(ProxyBase, ProcessDevice):
"""Proxy in a Process. See Proxy for more."""
pass
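# A minimal usage sketch (the endpoint addresses are placeholders, not
# defaults defined by pyzmq): relay ROUTER<->DEALER traffic in a background
# thread while mirroring every relayed message on a PUB monitor socket.
#
#   import zmq
#   from zmq.devices import ThreadProxy
#
#   proxy = ThreadProxy(zmq.ROUTER, zmq.DEALER, zmq.PUB)
#   proxy.bind_in("tcp://*:5550")    # frontend
#   proxy.bind_out("tcp://*:5551")   # backend
#   proxy.bind_mon("tcp://*:5552")   # monitor
#   proxy.start()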
__all__ = [
'Proxy',
'ThreadProxy',
'ProcessProxy',
]
| gpl-3.0 | -4,560,696,593,615,876,000 | 26.766667 | 87 | 0.616647 | false | 3.826953 | false | false | false |
00has00/modbus-manager | modbus-manager.py | 1 | 11775 | #! /usr/bin/python
##
##
##
import threading
import time
import json
import paho.mqtt.client as mqtt
from collections import deque
from pymodbus.client.sync import ModbusSerialClient as ModbusClient
##
##Node Configureation
MQTT_SERVER = "mqtt-server"
STATE = "active"
LOCATION = "garage"
STATUS_INTERVAL=300
BASE = "/" + STATE + "/" + LOCATION
SUB = BASE + "/+/control"
SYS_STATUS_QUEUE = BASE + "/system/status"
SYS_MESSAGE_QUEUE = BASE + "/system/messages"
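## With the values above, the node subscribes to "/active/garage/+/control"
## and publishes each sensor's state to "/active/garage/<sensor>/status".
## A quick smoke test from another host (assuming the mosquitto clients are
## installed and the broker matches MQTT_SERVER) might look like:
##
##   mosquitto_pub -h mqtt-server -t /active/garage/bore_pump_run/control -m on
##   mosquitto_sub -h mqtt-server -t /active/garage/bore_pump_run/status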
##
## Modbus Configuration
MODBUS_TYPE = 'rtu'
MODBUS_PORT = '/dev/ttyUSB0'
MODBUS_BAUDRATE = 9600
MODBUS_UNITID = 2
##
## Modbus Exception Codes
MODBUS_EXCEPTIONS = (
"",
"ILLEGAL FUNCTION",
"ILLEGAL DATA ADDRESS",
"ILLEGAL DATA VALUE",
"SERVER DEVICE FAILURE",
"ACKNOWLEDGE",
"SERVER DEVICE BUSY",
"MEMORY PARITY ERROR",
"GATEWAY PATH UNAVAILABLE",
"GATEWAY TARGET DEVICE FAILED TO RESPOND"
)
##
##Sensor Definitions
## Move to a config file someday...
##
sensors = {
'bore_pump_run': {
'init':'default',
'access':('read','write'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-memory',
'address':3078
},
'transfer_pump_run': {
'init':'default',
'access':('read','write'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-memory',
'address':3072
},
# status=1 - bore_pump fault
'bore_pump_fault': {
'init':'current',
'access':('read'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-input',
'address':2055
},
'bore_tank_level': {
'init':'current',
'access':('read'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-analog',
'address':0x108,
'register':0,
'name':"Bore Tank",
'outputMax':85000,
'sensorMax':5000,
'sensorMin':28
},
'house_tank_level': {
'init':'current',
'access':('read'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-analog',
'address':0x108,
'register':2,
'name':"House Tank",
'outputMax':40000,
'sensorMax':7805,
'sensorMin':28
},
'rain_tank_level': {
'init':'current',
'access':('read'),
'status':0,
'status-update':1,
'control':"",
'type':'modbus-analog',
'address':0x108,
'register':1,
'name':"Rain Tank",
'outputMax':80000,
'sensorMax':5600,
'sensorMin':28
}
}
##
##
## MQTT Callback Functions
mqttretries = 0  # reconnect attempts since the last successful connection
def mqtt_on_connect(client, userdata, flags, rc):
    global mqttretries
    mqttretries = 0
    print "MQTT Connection established to host: " + str(MQTT_SERVER)
def mqtt_on_disconnect(client, userdata, rc):
    global mqttretries
    if rc != 0:
        print("Unexpected disconnection.")
        mqttretries += 1
        if mqttretries > 3:
            ERR_FATAL = 1  # give up: no further reconnect attempts are made
        else:
            mqttc.connect(MQTT_SERVER)
def mqtt_on_message(client, userdata, message):
#print("MQTT Received message '" + str(message.payload) + "' on topic '" + message.topic + "' with QoS " + str(message.qos))
fractions = message.topic.split("/")
if fractions[1] == 'active':
if fractions[2] == LOCATION:
if fractions[4] == 'control':
## SPLIT OUT SYSTEM COMMAND PROCESSING TO A SEPERATE FUNCTION.
if fractions[3] == 'system' and fractions[4] == 'control' and message.payload == 'showSensors':
#print "publishing to: " + SYS_STATUS_QUEUE
mqttc.publish( SYS_STATUS_QUEUE, json.dumps(sensors) )
else:
## NEED TO MAKE MORE GENERIC ONE DAY, VERY FOCUSED ON receiving ON|OFF MESSAGES
msg = { 'sensor':fractions[3], 'action':fractions[4], 'msg':message.payload }
with messageQueueLock:
messageQueue.append(msg)
##
##
## Modbus Functions
def modbus_bit_read(address):
sts = modbusHandle.read_coils(int(address),count=1,unit=MODBUS_UNITID)
if sts.bits[0] == True:
return 1
else:
return 0
def modbus_bit_write(address, data=None):
if data == None:
data =0
#print "Setting address" + str(hex(address)) + " to: " + str(data)
if data == 0:
sts = modbusHandle.write_coil(int(address), False, unit=MODBUS_UNITID)
return sts
if data == 1:
sts = modbusHandle.write_coil(int(address), True, unit=MODBUS_UNITID)
return sts
return 0xff
def modbus_input_read(address):
#print "Reading Address" + str(address)
#return 0
sts = modbusHandle.read_discrete_inputs(int(address), count=1, unit=MODBUS_UNITID)
if sts.bits[0] == True:
return 1
else:
return 0
def modbus_analog_read(address, register=None):
if register == None:
register = 0
#print "Reading address: " + str(address) + " Register: " + str(register)
#return 2222
sts = modbusHandle.read_holding_registers(address, count=4, unit=MODBUS_UNITID)
#print sts
try:
assert(sts.function_code < 0x80)
except:
print "Modbus Error: " + str(MODBUS_EXCEPTIONS[sts.exception_code])
return -1
return int(sts.registers[register])
##
## Sensor Type to Function mapping
TYPE_TO_FUNCTIONS_MAP = {
'modbus-memory': {
'read': modbus_bit_read,
'write': modbus_bit_write
},
'modbus-input': {
'read': modbus_input_read
},
'modbus-analog': {
'read': modbus_analog_read
}
}
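##
## Dispatch sketch (illustrative only; returned values depend on the attached
## Modbus slave): a sensor's 'type' field selects the helper through this map.
##
##   sensor = sensors['bore_tank_level']                   # a 'modbus-analog' entry
##   read = TYPE_TO_FUNCTIONS_MAP[sensor['type']]['read']  # -> modbus_analog_read
##   raw = read(sensor['address'], register=sensor['register'])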
##
##
## Sensor Activity Function
## THIS FUNCTION MUST BE CALLED WHILE HOLDING THE modbusQueueLock TO PREVENT PROBLEMS
## i.e.
## with modbusQueueLock:
## sensor_activity(...)...
## blah..
def sensor_activity(sensor, instruction, data=None):
if sensor == None:
print "sensor_activity: request for action on non-existent sensor"
mqttc.publish(SYS_MESSAGE_QUEUE, payload="sensor_activity; request for action on non-existent sensor")
return -1
if instruction not in [ 'init', 'read', 'write' ]:
print "sensor_activity: no comprehension of instruction: " + str(instruction)
return -1
if instruction == 'init':
run_function = TYPE_TO_FUNCTIONS_MAP[sensor['type']]['read']
if sensor['init'] == 'current':
#print str(run_function)
if 'register' in sensor:
status = run_function( sensor['address'], register=sensor['register'] )
else:
status = run_function( sensor['address'] )
sensor['status'] = status
#print "Status = " + str(status)
if sensor['init'] == 'default':
run_function = TYPE_TO_FUNCTIONS_MAP[sensor['type']]['write']
if 'register' in sensor:
status = run_function(sensor['address'], register=sensor['register'] )
else:
status = run_function(sensor['address'])
return 0
run_function = TYPE_TO_FUNCTIONS_MAP[sensor['type']][instruction]
if instruction == 'write':
status = run_function( sensor['address'], data=data )
else:
if 'register' in sensor:
ret = run_function(sensor['address'], register=sensor['register'] )
# analog sensors need to return, not just the value, but the min, max, and output transform.
status = str(ret) + " " + str(sensor['sensorMin']) + " " + str(sensor['sensorMax']) + " " + str(sensor['outputMax'])
else:
status = run_function(sensor['address'])
#print "Status = " + str(status)
return status
##
##
## Thread run() functions
def mqttManager():
mqttc.loop_forever()
def commandManager():
lcl = threading.local()
while 1:
time.sleep(5)
if len(messageQueue) != 0:
with messageQueueLock:
lcl.msg = messageQueue.popleft()
if lcl.msg['sensor'] not in sensors:
lcl.notice = "Received message for non-existant sensor: " + lcl.msg['sensor'] + "... Discarding."
#print lcl.notice
mqttc.publish(SYS_MESSAGE_QUEUE, payload=lcl.notice)
continue
if lcl.msg['action'] == 'control' and 'write' in sensors[lcl.msg['sensor']]['access']:
if lcl.msg['msg'] == 'on':
with modbusQueueLock:
sensor_activity(sensors[lcl.msg['sensor']], 'write', 1)
sensors[lcl.msg['sensor']]['status'] = '1'
mqttc.publish(BASE + "/" + lcl.msg['sensor'] + "/status", payload='1')
elif lcl.msg['msg'] == 'off':
with modbusQueueLock:
sensor_activity(sensors[lcl.msg['sensor']], 'write', 0)
sensors[lcl.msg['sensor']]['status'] = '0'
mqttc.publish(BASE + "/" + lcl.msg['sensor'] + "/status", payload='0')
elif lcl.msg['msg'] == 'status':
mqttc.publish(BASE + "/" + lcl.msg['sensor'] + "/status", payload=str(sensors[lcl.msg['sensor']]['status']))
else:
lcl.notice= "Received invalid instruction '" + lcl.msg['msg'] + "' for sensor: " + lcl.msg['sensor'] + "... Discarding."
mqttc.publish(SYS_MESSAGE_QUEUE, payload=lcl.notice)
def statusManager():
lcl = threading.local()
lcl.sensors_to_status = []
print "Queueing Sensors for statusing..."
for sensor in sensors:
if 'status-update' in sensors[sensor]:
lcl.sensors_to_status.append(sensor)
print " Added: " + str(sensor)
while 1:
for sensor in lcl.sensors_to_status:
with modbusQueueLock:
lcl.status = sensor_activity(sensors[sensor], 'read')
sensors[sensor]['status'] = lcl.status
mqttc.publish(BASE + "/" + str(sensor) + "/status", payload=str(lcl.status) )
time.sleep (STATUS_INTERVAL)
##
##
## Main
##
## Share Data
messageQueue = deque([])
messageQueueLock = threading.RLock()
modbusQueueLock = threading.RLock()
tMqttManager = threading.Thread(name='tMqttmanager', target=mqttManager)
tCommandManager = threading.Thread(name='tCommandManager', target=commandManager)
tStatusManager = threading.Thread(name='tStatusManager', target=statusManager)
tMqttManager.daemon = True
tCommandManager.daemon = True
tStatusManager.daemon = True
##
## Setup MQTT
print "Setting up MQTT handlers..."
mqttc = mqtt.Client()
mqttc.on_connect = mqtt_on_connect
mqttc.on_message = mqtt_on_message
mqttc.on_disconnect = mqtt_on_disconnect
mqttc.connect(MQTT_SERVER)
mqttc.subscribe(SUB)
tMqttManager.start()
time.sleep(1)
##
## Setup ModBus
print "Setting up Modbus handlers..."
modbusHandle = ModbusClient(MODBUS_TYPE, port=MODBUS_PORT, baudrate=MODBUS_BAUDRATE, unit_id=MODBUS_UNITID)
if modbusHandle.connect() == False:
mqttc.publish(SYS_MESSAGE_QUEUE, payload="ERR_FATAL: Failed to start ModBus connection")
exit()
else:
print "ModBus Connection Established"
##
## Initialise sensor data structures
print "Initialising Sensors..."
for sensor in sensors:
#print ("{0}: {1}").format(sensor, sensors[sensor]['type'])
with modbusQueueLock:
sensor_activity(sensors[sensor], 'init')
##
## Starting Command Manager
print "Starting CommandManager Thread..."
tCommandManager.start()
##
## Kick off Status_manager
print "Starting StatusManager Thread..."
tStatusManager.start()
time.sleep(1)
print "Ready!"
while 1:
time.sleep(300)
print "--MARK--"
| gpl-3.0 | -7,693,289,409,988,868,000 | 27.789731 | 140 | 0.579788 | false | 3.530735 | false | false | false |
suselrd/django-content-interactions | content_interactions/models.py | 1 | 7983 | # coding=utf-8
from django.conf import settings
from django.dispatch import receiver
try:
from django.contrib.contenttypes.fields import GenericForeignKey
except ImportError:
from django.contrib.contenttypes.generic import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core import urlresolvers
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.utils.encoding import python_2_unicode_compatible
from social_graph import Graph
from managers import CommentManager, CommentCurrentSiteManager
from mixins import author_edge, target_edge
from signals import item_commented, item_comment_removed
graph = Graph()
class BaseCommentAbstractModel(models.Model):
"""
An abstract base class that any custom comment models probably should
subclass.
"""
# Content-object field
content_type = models.ForeignKey(ContentType,
verbose_name=_(u'content type'),
related_name="content_type_set_for_%(class)s")
object_pk = models.TextField(_(u'object ID'))
content_object = GenericForeignKey(ct_field="content_type", fk_field="object_pk")
# Metadata about the comment
site = models.ForeignKey(Site)
class Meta:
abstract = True
def validate_level(value):
if value > settings.COMMENT_MAX_LEVELS:
raise ValidationError(_('Max comment level exceeded.'))
@python_2_unicode_compatible
class Comment(BaseCommentAbstractModel):
"""
A user comment about some object.
"""
# Who posted this comment? If ``user`` is set then it was an authenticated
# user; otherwise at least user_name should have been set and the comment
# was posted by a non-authenticated user.
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u'user'),
blank=True, null=True, related_name="%(class)s_comments")
user_name = models.CharField(_(u"name"), max_length=50, blank=True)
# Explicit `max_length` to apply both to Django 1.7 and 1.8+.
user_email = models.EmailField(_(u"email"), max_length=254,
blank=True)
user_url = models.URLField(_(u"user's URL"), blank=True)
comment = models.TextField(_(u'comment'), max_length=settings.COMMENT_MAX_LENGTH)
answer_to = models.ForeignKey(
'self', verbose_name=_(u'answer to'), related_name='answers', blank=True, null=True
)
level = models.IntegerField(_(u'comment level'), blank=True, null=True, validators=[validate_level])
# Metadata about the comment
submit_date = models.DateTimeField(_(u'date/time submitted'), auto_now_add=True)
ip_address = models.GenericIPAddressField(_(u'IP address'), unpack_ipv4=True, blank=True, null=True)
is_public = models.BooleanField(_(u'is public'), default=True,
help_text=_(u'Uncheck this box to make the comment effectively '
u'disappear from the site.'))
is_removed = models.BooleanField(_(u'is removed'), default=False,
help_text=_(u'Check this box if the comment is inappropriate. '
u'A "This comment has been removed" message will '
u'be displayed instead.'))
# Manager
objects = CommentManager()
on_site = CommentCurrentSiteManager()
historical = models.Manager()
class Meta:
ordering = ('submit_date',)
permissions = [("can_moderate", "Can moderate comments")]
verbose_name = _(u'comment')
verbose_name_plural = _(u'comments')
def __str__(self):
return "%s: %s..." % (self.name, self.comment[:50])
@property
def user_info(self):
"""
Get a dictionary that pulls together information about the poster
safely for both authenticated and non-authenticated comments.
This dict will have ``name``, ``email``, and ``url`` fields.
"""
if not hasattr(self, "_user_info"):
user_info = {
"name": self.user_name,
"email": self.user_email,
"url": self.user_url
}
if self.user_id:
u = self.user
if u.email:
user_info["email"] = u.email
# If the user has a full name, use that for the user name.
# However, a given user_name overrides the raw user.username,
# so only use that if this comment has no associated name.
if u.get_full_name():
user_info["name"] = self.user.get_full_name()
elif not self.user_name:
user_info["name"] = u.get_username()
self._user_info = user_info
return self._user_info
def _get_name(self):
return self.user_info["name"]
def _set_name(self, val):
if self.user_id:
raise AttributeError(_(u"This comment was posted by an authenticated "
u"user and thus the name is read-only."))
self.user_name = val
name = property(_get_name, _set_name, doc="The name of the user who posted this comment")
def _get_email(self):
return self.user_info["email"]
def _set_email(self, val):
if self.user_id:
raise AttributeError(_(u"This comment was posted by an authenticated "
u"user and thus the email is read-only."))
self.user_email = val
email = property(_get_email, _set_email, doc="The email of the user who posted this comment")
def _get_url(self):
        return self.user_info["url"]
def _set_url(self, val):
self.user_url = val
url = property(_get_url, _set_url, doc="The URL given by the user who posted this comment")
def get_as_text(self):
"""
Return this comment as plain text. Useful for emails.
"""
data = {
'user': self.user or self.name,
'date': self.submit_date,
'comment': self.comment,
'domain': self.site.domain,
'url': self.get_absolute_url()
}
return _(u'Posted by %(user)s at %(date)s\n\n%(comment)s\n\nhttp://%(domain)s%(url)s') % data
def delete(self, using=None):
for answer in self.answers.all():
answer.delete()
self.is_removed = True
self.save()
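# Note: delete() above is a soft delete -- it recursively flags the whole
# answer tree as removed and saves, letting the post_save receiver below tear
# down the social-graph edges instead of deleting database rows.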
@receiver(models.signals.pre_save, sender=Comment, dispatch_uid="fill_comment_user_data")
def fill_comment_user_data(instance, **kwargs):
if not instance.user_name or not instance.user_email:
instance.user_name = instance.user.get_full_name() or instance.user.username
instance.user_email = instance.user.email
if not instance.level:
instance.level = instance.answer_to.level + 1 if instance.answer_to else 1
@receiver(models.signals.post_save, sender=Comment, dispatch_uid="manage_comment_edges")
def create_comment_edges(instance, created, **kwargs):
if created:
if instance.user:
graph.edge(instance.user, instance, author_edge(), instance.site, {})
graph.edge(instance, instance.content_object, target_edge(), instance.site, {})
item_commented.send(sender=Comment, instance=instance, user=instance.user, answer_to=instance.answer_to)
elif instance.is_removed:
if instance.user:
graph.no_edge(instance.user, instance, author_edge(), instance.site)
graph.no_edge(instance, instance.content_object, target_edge(), instance.site)
item_comment_removed.send(
sender=Comment, instance=instance, user=instance.content_object.get_comments_manager() or instance.user
) | bsd-3-clause | -2,322,210,936,722,491,000 | 37.946341 | 115 | 0.617312 | false | 4.142709 | false | false | false |
sjug/origin | vendor/github.com/heketi/heketi/extras/tools/comparison.py | 8 | 10218 | #!/usr/bin/env python
#
# Copyright (c) 2018 The heketi Authors
#
# This file is licensed to you under your choice of the GNU Lesser
# General Public License, version 3 or any later version (LGPLv3 or
# later), or the GNU General Public License, version 2 (GPLv2), in all
# cases as published by the Free Software Foundation.
#
import argparse
import json
import sys
import yaml
DESC = """
Compare outputs of gluster and/or heketi and/or openshift/k8s.
Prints lists of volumes where sources differ.
"""
EXAMPLE = """
Example:
$ python3 comparison.py
--gluster-info gluster-volume-info.txt
--heketi-json heketi-db.json
--pv-yaml openshift-pv-yaml.yaml
"""
# flag constants
IN_GLUSTER = 'gluster'
IN_HEKETI = 'heketi'
IN_PVS = 'pvs'
IS_BLOCK = 'BV'
class CliError(ValueError):
pass
def main():
parser = argparse.ArgumentParser(description=DESC, epilog=EXAMPLE)
parser.add_argument(
'--gluster-info', '-g',
help='Path to a file containing gluster volume info')
parser.add_argument(
'--heketi-json', '-j',
help='Path to a file containing Heketi db json export')
parser.add_argument(
'--pv-yaml', '-y',
help='Path to a file containing PV yaml data')
parser.add_argument(
'--skip-ok', '-K', action='store_true',
help='Exclude matching items from output')
parser.add_argument(
'--pending', action='store_true',
help='Show heketi pending status (best effort)')
parser.add_argument(
'--no-header', '-H', action='store_true',
help='Do not print column header')
parser.add_argument(
'--ignore', '-I', action='append',
help='Exlude given volume name (multiple allowed)')
parser.add_argument(
'--match-storage-class', '-S', action='append',
help='Match one or more storage class names')
parser.add_argument(
'--skip-block', action='store_true',
help='Exclude block volumes from output')
parser.add_argument(
'--bricks', action='store_true',
help='Compare bricks rather than volumes')
cli = parser.parse_args()
try:
if cli.bricks:
return examine_bricks(cli)
return examine_volumes(cli)
except CliError as err:
parser.error(str(err))
def examine_volumes(cli):
check = []
gvinfo = heketi = pvdata = None
if cli.gluster_info:
check.append(IN_GLUSTER)
gvinfo = parse_gvinfo(cli.gluster_info)
if cli.heketi_json:
check.append(IN_HEKETI)
heketi = parse_heketi(cli.heketi_json)
if cli.pv_yaml:
check.append(IN_PVS)
pvdata = parse_oshift(cli.pv_yaml)
if not check:
raise CliError(
"Must provide: --gluster-info OR --heketi-json OR --pv-yaml")
summary = compile_summary(cli, gvinfo, heketi, pvdata)
for ign in (cli.ignore or []):
if summary.pop(ign, None):
sys.stderr.write('ignoring: {}\n'.format(ign))
compare(summary, check, cli.skip_ok,
header=(not cli.no_header),
show_pending=(cli.pending),
skip_block=cli.skip_block)
return
def examine_bricks(cli):
check = []
gvinfo = heketi = None
if cli.gluster_info:
check.append(IN_GLUSTER)
gvinfo = parse_gvinfo(cli.gluster_info)
if cli.heketi_json:
check.append(IN_HEKETI)
heketi = parse_heketi(cli.heketi_json)
if not check:
raise CliError(
"Must provide: --gluster-info and --heketi-json")
summary = compile_brick_summary(cli, gvinfo, heketi)
compare_bricks(summary, check,
skip_ok=cli.skip_ok)
def parse_heketi(h_json):
with open(h_json) as fh:
return json.load(fh)
def parse_oshift(yf):
with open(yf) as fh:
return yaml.safe_load(fh)
def parse_gvlist(gvl):
vols = {}
with open(gvl) as fh:
for line in fh:
vols[line.strip()] = []
return vols
def parse_gvinfo(gvi):
vols = {}
volume = None
with open(gvi) as fh:
for line in fh:
l = line.strip()
if l.startswith("Volume Name:"):
volume = l.split(":", 1)[-1].strip()
vols[volume] = []
if l.startswith('Brick') and l != "Bricks:":
if volume is None:
raise ValueError("Got Brick before volume: %s" % l)
vols[volume].append(l.split(":", 1)[-1].strip())
return vols
def compile_heketi(summary, heketi):
for vid, v in heketi['volumeentries'].items():
n = v['Info']['name']
summary[n] = {'id': vid, IN_HEKETI: True}
if v['Pending']['Id']:
summary[n]['heketi-pending'] = True
if v['Info'].get('block'):
summary[n]['heketi-bhv'] = True
for bvid, bv in heketi['blockvolumeentries'].items():
n = bv['Info']['name']
summary[n] = {
IN_HEKETI: True,
'block': True,
'id': bvid,
}
if bv['Pending']['Id']:
summary[n]['heketi-pending'] = True
def compile_heketi_bricks(summary, heketi):
for bid, b in heketi['brickentries'].items():
path = b['Info']['path']
node_id = b['Info']['node']
vol_id = b['Info']['volume']
host = (heketi['nodeentries'][node_id]
['Info']['hostnames']['storage'][0])
vol_name = heketi['volumeentries'][vol_id]['Info']['name']
fbp = '{}:{}'.format(host, path)
dest = summary.setdefault(fbp, {})
dest[IN_HEKETI] = True
dest['heketi_volume'] = vol_name
def compile_gvinfo(summary, gvinfo):
for vn in gvinfo:
if vn in summary:
summary[vn][IN_GLUSTER] = True
else:
summary[vn] = {IN_GLUSTER: True}
def compile_gvinfo_bricks(summary, gvinfo):
for vn, content in gvinfo.items():
for bn in content:
dest = summary.setdefault(bn, {})
dest[IN_GLUSTER] = True
dest['gluster_volume'] = vn
def compile_pvdata(summary, pvdata, matchsc):
for elem in pvdata['items']:
g = elem.get('spec', {}).get('glusterfs', {})
ma = elem.get('metadata', {}).get('annotations', {})
if not g and 'glusterBlockShare' not in ma:
continue
sc = elem.get('spec', {}).get('storageClassName', '')
if matchsc and sc not in matchsc:
sys.stderr.write(
            'ignoring: {} from storage class "{}"\n'.format(
                g.get("path") or ma.get("glusterBlockShare"), sc))
continue
if 'path' in g:
vn = g['path']
block = False
elif 'glusterBlockShare' in ma:
vn = ma['glusterBlockShare']
block = True
else:
raise KeyError('path (volume name) not found in PV data')
dest = summary.setdefault(vn, {})
dest[IN_PVS] = True
if block:
dest['block'] = True
def compile_summary(cli, gvinfo, heketi, pvdata):
summary = {}
if heketi:
compile_heketi(summary, heketi)
if gvinfo:
compile_gvinfo(summary, gvinfo)
if pvdata:
compile_pvdata(summary, pvdata, matchsc=cli.match_storage_class)
return summary
def compile_brick_summary(cli, gvinfo, heketi):
summary = {}
if gvinfo:
compile_gvinfo_bricks(summary, gvinfo)
if heketi:
compile_heketi_bricks(summary, heketi)
return summary
def _check_item(vname, vstate, check):
tocheck = set(check)
flags = []
if vstate.get('block'):
flags.append(IS_BLOCK)
# block volumes will never be found in gluster info
tocheck.discard(IN_GLUSTER)
m = set(c for c in tocheck if vstate.get(c))
flags.extend(sorted(m))
return m == tocheck, flags
def compare(summary, check, skip_ok=False, header=True, show_pending=False,
skip_block=False):
if header:
_print = Printer(['Volume-Name', 'Match', 'Volume-ID'])
else:
_print = Printer([])
for vn, vs in summary.items():
ok, flags = _check_item(vn, vs, check)
if ok and skip_ok:
continue
if 'BV' in flags and skip_block:
continue
heketi_info = vs.get('id', '')
if show_pending and vs.get('heketi-pending'):
heketi_info += '/pending'
if vs.get('heketi-bhv'):
heketi_info += '/block-hosting'
if ok:
sts = 'ok'
else:
sts = ','.join(flags)
_print.line(vn, sts, heketi_info)
def _check_brick(bpath, bstate, check):
tocheck = set(check)
flags = []
volumes = []
m = set(c for c in tocheck if bstate.get(c))
flags.extend(sorted(m))
gv = bstate.get('gluster_volume')
hv = bstate.get('heketi_volume')
ok = False
if m == tocheck and gv == hv:
ok = True
volumes = ['match={}'.format(gv)]
else:
if gv:
volumes.append('gluster={}'.format(gv))
if hv:
volumes.append('heketi={}'.format(hv))
return ok, flags, volumes
def compare_bricks(summary, check, header=True, skip_ok=False):
if header:
_print = Printer(['Brick-Path', 'Match', 'Volumes'])
else:
_print = Printer([])
for bp, bstate in summary.items():
ok, flags, volumes = _check_brick(bp, bstate, check)
if ok and skip_ok:
continue
if ok:
sts = 'ok'
else:
sts = ','.join(flags)
_print.line(bp, sts, ','.join(volumes))
class Printer(object):
"""Utility class for printing columns w/ headers."""
def __init__(self, header):
self._did_header = False
self.header = header or []
def line(self, *columns):
if self.header and not self._did_header:
self._print_header(columns)
self._did_header = True
print (' '.join(columns))
def _print_header(self, columns):
parts = []
for idx, hdr in enumerate(self.header):
pad = max(0, len(columns[idx]) - len(hdr))
parts.append('{}{}'.format(hdr, ' ' * pad))
print (' '.join(parts))
if __name__ == '__main__':
main()
| apache-2.0 | -2,578,136,947,513,737,000 | 27.946176 | 79 | 0.563613 | false | 3.455529 | false | false | false |
pixelated/pixelated-dispatcher | pixelated/provider/docker/pixelated_adapter.py | 2 | 1901 | #
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from pixelated.provider.docker.adapter import DockerAdapter
__author__ = 'fbernitt'
class PixelatedDockerAdapter(DockerAdapter):
PIXELATED_PORT = 4567
def __init__(self, provider_hostname):
self.provider_hostname = provider_hostname
def app_name(self):
return 'pixelated'
def docker_image_name(self):
return 'pixelated/pixelated-user-agent'
def run_command(self, leap_provider_x509):
extra_args = ""
if leap_provider_x509.has_ca_bundle():
extra_args = ' --leap-provider-cert /mnt/user/dispatcher-leap-provider-ca.crt'
if leap_provider_x509.has_fingerprint():
extra_args = ' --leap-provider-cert-fingerprint %s' % leap_provider_x509.fingerprint
return '/bin/bash -l -c "/usr/bin/pixelated-user-agent --leap-home /mnt/user --host 0.0.0.0 --port 4567 --organization-mode%s"' % extra_args
def setup_command(self):
return '/bin/true'
def port(self):
return self.PIXELATED_PORT
def environment(self, data_path):
return {
'DISPATCHER_LOGOUT_URL': '/auth/logout',
'FEEDBACK_URL': 'https://%s/tickets' % self.provider_hostname
}
| agpl-3.0 | -3,948,492,075,615,778,000 | 34.203704 | 148 | 0.685429 | false | 3.705653 | false | false | false |
BaReinhard/Hacktoberfest-Data-Structure-and-Algorithms | algorithms/dfs/python/Dfs.py | 1 | 1315 | __author__ = 'mittr'
# Python program to print DFS traversal from a
# given given graph
from collections import defaultdict
# This class represents a directed graph using
# adjacency list representation
class Graph:
# Constructor
def __init__(self):
# default dictionary to store graph
self.graph = defaultdict(list)
# function to add an edge to graph
def addEdge(self,u,v):
self.graph[u].append(v)
# A function used by DFS
def DFSUtil(self,v,visited):
# Mark the current node as visited and print it
visited[v]= True
print (v),
# Recur for all the vertices adjacent to this vertex
for i in self.graph[v]:
if visited[i] == False:
self.DFSUtil(i, visited)
# The function to do DFS traversal. It uses
# recursive DFSUtil()
def DFS(self,v):
# Mark all the vertices as not visited
visited = [False]*(len(self.graph))
# Call the recursive helper function to print
# DFS traversal
self.DFSUtil(v,visited)
# Driver code
# Create a graph given in the above diagram
g = Graph()
g.addEdge(0, 1)
g.addEdge(0, 2)
g.addEdge(1, 2)
g.addEdge(2, 0)
g.addEdge(2, 3)
g.addEdge(3, 3)
print("Following is DFS from (starting from vertex 2)")
g.DFS(2)
| gpl-3.0 | -330,548,148,491,065,340 | 22.482143 | 60 | 0.631179 | false | 3.534946 | false | false | false |
sathia-dev/simwf | src/wf/processor.py | 1 | 5391 | '''
Created on Sep 4, 2017
@author: tskthilak
'''
import logging
from wf.model import WfSpecification, WfJob, WfTask
from wf.worker import cb_run_job
from wf.util import encode, decode
from wf.error import WfJobError, TaskNotFoundError
from cPickle import dumps
from rq import Queue
from wf.store import Store
from rq.job import Job, NoSuchJobError
from time import time
LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
logging.basicConfig(format="%(asctime)s;%(levelname)s;%(message)s")
class QueueName():
A1 = 'high'
B1 = 'normal'
C1 = 'low'
class Processor(object):
'''
classdocs
'''
def __init__(self, db_conn):
self._db_conn = db_conn
self._queue = {}
def worker(self, name):
if not self._queue.has_key(name):
self._queue[name] = Queue(connection=self._db_conn)
return self._queue[name]
def deserialize_spec(self, mime, data):
retobj = WfSpecification.deserialize(mime, data)
#LOG.debug("data : %s" % (data))
#LOG.debug("object : %s" % (dumps(retobj)))
return retobj
def serialize_spec(self, mime, spec):
retdata = spec.serialize(mime)
#LOG.debug("object : %s" % (dumps(spec.wfspec)))
#LOG.debug("data : %s" % (retdata))
return retdata
def deserialize_job(self, mime, data):
retobj = WfJob.deserialize(mime, data)
#LOG.debug("data : %s" % (data))
#LOG.debug("object : %s" % (dumps(retobj.wfinst)))
return retobj
def serialize_job(self, mime, job):
retdata = job.serialize(mime)
#LOG.debug("object : %s" % (dumps(job.wfinst)))
#LOG.debug("data : %s" % (retdata))
return retdata
def job_run(self, jid, data):
'''
current state operation next state
started [auto] running
started cancel cancelled
started restart started
running [auto] waiting | completed
running cancel cancelled
running restart started
waiting [auto] running | (timeout) cancelled
waiting cancel cancelled
waiting restart started
cancelled and completed are terminal states,
any operation would result in error.
'''
meta = {}
meta['debug'] = False
meta['submitted_at'] = time()
meta['data'] = data
LOG.debug("job {0} has meta_data as {1}".format(jid, data))
qjob = self.worker(QueueName.B1).enqueue(cb_run_job, encode(jid), job_id=str(jid), meta=meta)
return qjob.id
def _retreive_qjob(self, jid):
LOG.debug("jid=%s" % (jid))
job = Job(id=jid, connection=self._db_conn)
if job is None: raise ValueError("invalid job id")
try:
job.refresh()
except NoSuchJobError:
raise ValueError("invalid job id")
return job
def job_load(self, jid):
LOG.debug("loading jid=%s" % (jid))
#TODO - if the state is not eventually updated by worker, this will load the initial state
job_details = self._db_conn.load_job(jid)
job = WfJob.deserialize('application/json', job_details)
job.set_data("id", jid)
return job
def job_status(self, jid):
'''
' for the given job-id
' reports the rq job status
' reports the wf job tasks' status
' {
' "status" : <rq_job_status>,
' "tasks" :
' [
' {"id":<task_id>,"name":<task_name>,"status":<task_status>},
' ...
' ]
' }
'''
status = {}
status['status'] = self._retreive_qjob(jid).status
#TODO: see the bug mentioned in job_load()
wfjob = self.job_load(jid)
task_list = wfjob.task_list()
status['tasks'] = []
for tid in task_list:
try:
task = WfTask.deserialize(wfjob, self._db_conn.load_task(tid))
status['tasks'].append({
'id':task.id,
'name':task.get_name(),
'status':task.status})
except TaskNotFoundError:
LOG.debug("task {0} of job {1} is not found.".format(tid, jid))
return status
def job_result(self, jid):
'''
' for the given job-id
' reports the rq job result
' that was returned by the rq job callback method
' which usually is the wf job task tree
'''
return self._retreive_qjob(jid).result
def job_cancel(self, jid):
qjob = self._retreive_qjob(jid)
if qjob.get_status() in ["finished", "failed"]:
raise WfJobError("invalid operation")
qjob.meta['cancel_requested'] = True
qjob.meta['cancel_requested_at'] = time()
qjob.save_meta()
qjob.save()
return jid
def job_restart(self, jid):
qjob = self._retreive_qjob(jid)
qjob.cancel()
qjob.delete()
return self.job_run(jid)
| gpl-3.0 | -4,083,018,877,632,404,500 | 31.07362 | 101 | 0.527731 | false | 3.864516 | false | false | false |
sfromm/ansible-commander | lib/main/management/commands/acom_inventory.py | 1 | 4681 | #!/usr/bin/env python
# (c) 2013, AnsibleWorks
#
# This file is part of Ansible Commander
#
# Ansible Commander is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Commander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible Commander. If not, see <http://www.gnu.org/licenses/>.
import json
from optparse import make_option
import os
import sys
from django.core.management.base import NoArgsCommand, CommandError
class Command(NoArgsCommand):
help = 'Ansible Commander Inventory script'
option_list = NoArgsCommand.option_list + (
make_option('-i', '--inventory', dest='inventory', type='int', default=0,
help='Inventory ID (can also be specified using '
'ACOM_INVENTORY environment variable)'),
make_option('--list', action='store_true', dest='list', default=False,
help='Return JSON hash of host groups.'),
make_option('--host', dest='host', default='',
help='Return JSON hash of host vars.'),
make_option('--indent', dest='indent', type='int', default=None,
help='Indentation level for pretty printing output'),
)
def get_list(self, inventory, indent=None):
groups = {}
for group in inventory.groups.all():
# FIXME: Check if group is active?
group_info = {
'hosts': list(group.hosts.values_list('name', flat=True)),
# FIXME: Include host vars here?
'vars': dict(group.variable_data.values_list('name', 'data')),
'children': list(group.children.values_list('name', flat=True)),
}
group_info = dict(filter(lambda x: bool(x[1]), group_info.items()))
if group_info.keys() in ([], ['hosts']):
groups[group.name] = group_info.get('hosts', [])
else:
groups[group.name] = group_info
self.stdout.write(json.dumps(groups, indent=indent))
def get_host(self, inventory, hostname, indent=None):
from lib.main.models import Host
hostvars = {}
try:
# FIXME: Check if active?
host = inventory.hosts.get(name=hostname)
except Host.DoesNotExist:
raise CommandError('Host %s not found in the given inventory' % hostname)
hostvars = dict(host.variable_data.values_list('name', 'data'))
# FIXME: Do we also need to include variables defined for groups of which
# this host is a member?
self.stdout.write(json.dumps(hostvars, indent=indent))
def handle_noargs(self, **options):
from lib.main.models import Inventory
try:
inventory_id = int(os.getenv('ACOM_INVENTORY', options.get('inventory', 0)))
except ValueError:
raise CommandError('Inventory ID must be an integer')
if not inventory_id:
raise CommandError('No inventory ID specified')
try:
inventory = Inventory.objects.get(id=inventory_id)
except Inventory.DoesNotExist:
raise CommandError('Inventory with ID %d not found' % inventory_id)
list_ = options.get('list', False)
host = options.get('host', '')
indent = options.get('indent', None)
if list_ and host:
raise CommandError('Only one of --list or --host can be specified')
elif list_:
self.get_list(inventory, indent=indent)
elif host:
self.get_host(inventory, host, indent=indent)
else:
self.stderr.write('Either --list or --host must be specified')
self.print_help()
if __name__ == '__main__':
# FIXME: This environment variable *should* already be set if this script
# is called from a celery task. Probably won't work otherwise.
try:
import lib.settings
except ImportError:
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')))
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lib.settings')
from django.core.management import execute_from_command_line
argv = [sys.argv[0], 'acom_inventory'] + sys.argv[1:]
execute_from_command_line(argv)
| gpl-3.0 | 1,836,607,627,473,570,800 | 42.342593 | 108 | 0.621876 | false | 4.198206 | false | false | false |
indictools/scan2text | vedavaapi/scripts/run.py | 3 | 1496 | import logging
import os, sys, getopt
CODE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
# Add parent directory to PYTHONPATH, so that vedavaapi_py_api module can be found.
sys.path.append(CODE_ROOT)
print(sys.path)
from vedavaapi_py_api import run
from sanskrit_data.db.implementations import mongodb
from sanskrit_data.schema.common import JsonObject
logging.basicConfig(
level=logging.DEBUG,
format="%(levelname)s: %(asctime)s {%(filename)s:%(lineno)d}: %(message)s "
)
REPO_ROOT = os.path.join(CODE_ROOT, "textract-example-repo")
def dump_db(dest_dir=os.path.join(REPO_ROOT, "books_v2")):
from vedavaapi_py_api.ullekhanam.backend import get_db
db = get_db(db_name_frontend="ullekhanam_test")
logging.debug(db.list_books())
db.dump_books(dest_dir)
def import_db(db_name_frontend="ullekhanam_test_v2"):
from vedavaapi_py_api.ullekhanam.backend import get_db
db = get_db(db_name_frontend=db_name_frontend)
db.import_all(rootdir=db.external_file_store)
def main(argv):
def usage():
logging.info("run.py [--action dump]...")
exit(1)
params = JsonObject()
try:
opts, args = getopt.getopt(argv, "ha:", ["action="])
for opt, arg in opts:
if opt == '-h':
usage()
elif opt in ("-a", "--action"):
params.action = arg
except getopt.GetoptError:
usage()
if params.action == "dump":
dump_db()
elif params.action == "import":
import_db()
if __name__ == '__main__':
main(sys.argv[1:]) | gpl-3.0 | -7,592,547,539,747,782,000 | 24.810345 | 83 | 0.674465 | false | 2.944882 | false | false | false |
nikdoof/test-auth | app/groups/migrations/0001_initial.py | 1 | 7556 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupInformation'
db.create_table('groups_groupinformation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('group', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.Group'], unique=True)),
('type', self.gf('django.db.models.fields.IntegerField')()),
('public', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('requestable', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('description', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('groups', ['GroupInformation'])
# Adding M2M table for field admins on 'GroupInformation'
db.create_table('groups_groupinformation_admins', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('groupinformation', models.ForeignKey(orm['groups.groupinformation'], null=False)),
('user', models.ForeignKey(orm['auth.user'], null=False))
))
db.create_unique('groups_groupinformation_admins', ['groupinformation_id', 'user_id'])
# Adding model 'GroupRequest'
db.create_table('groups_grouprequest', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('group', self.gf('django.db.models.fields.related.ForeignKey')(related_name='requests', to=orm['auth.Group'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='grouprequests', to=orm['auth.User'])),
('reason', self.gf('django.db.models.fields.TextField')()),
('status', self.gf('django.db.models.fields.IntegerField')()),
('changed_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('changed_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('groups', ['GroupRequest'])
def backwards(self, orm):
# Deleting model 'GroupInformation'
db.delete_table('groups_groupinformation')
# Removing M2M table for field admins on 'GroupInformation'
db.delete_table('groups_groupinformation_admins')
# Deleting model 'GroupRequest'
db.delete_table('groups_grouprequest')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'groups.groupinformation': {
'Meta': {'object_name': 'GroupInformation'},
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {}),
'group': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'requestable': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
'groups.grouprequest': {
'Meta': {'object_name': 'GroupRequest'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'requests'", 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'grouprequests'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['groups']
| bsd-3-clause | 8,985,021,805,985,671,000 | 64.137931 | 163 | 0.577687 | false | 3.81809 | false | false | false |
ProjexSoftware/projexui | projexui/widgets/xganttwidget/xganttwidgetitem.py | 2 | 25083 | #!/usr/bin/python
""" Defines a gantt widget item class for adding items to the widget. """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2012, Projex Software'
__license__ = 'LGPL'
# maintenance information
__maintainer__ = 'Projex Software'
__email__ = 'team@projexsoftware.com'
#------------------------------------------------------------------------------
from projex.enum import enum
import projexui
import projex.dates
from projex.text import nativestring
from projexui.qt import wrapVariant
from projexui.qt.QtCore import QDate,\
QRectF,\
QSize,\
QTime,\
QDateTime,\
Qt
from projexui.qt.QtGui import QIcon
from projexui.widgets.xtreewidget import XTreeWidgetItem
from projexui.widgets.xganttwidget.xganttviewitem import XGanttViewItem
from projexui.widgets.xganttwidget.xganttdepitem import XGanttDepItem
from projexui.widgets.xganttwidget.xganttwidget import XGanttWidget
#------------------------------------------------------------------------------
class XGanttWidgetItem(XTreeWidgetItem):
"""
Defines the main widget item class that contains information for both the
tree and view widget items.
"""
ItemStyle = enum('Normal', 'Group', 'Milestone')
def __init__(self, ganttWidget):
super(XGanttWidgetItem, self).__init__()
# set default properties
self.setFixedHeight(ganttWidget.cellHeight())
for i in range(1, 20):
self.setTextAlignment(i, Qt.AlignCenter)
# define custom properties
self._blockedAdjustments = {}
self._viewItem = self.createViewItem()
self._dateStart = QDate.currentDate()
self._dateEnd = QDate.currentDate()
self._allDay = True
self._timeStart = QTime(0, 0, 0)
self._timeEnd = QTime(23, 59, 59)
self._name = ''
self._properties = {}
self._itemStyle = XGanttWidgetItem.ItemStyle.Normal
self._useGroupStyleWithChildren = True
self._dependencies = {}
self._reverseDependencies = {}
def addChild(self, item):
"""
Adds a new child item to this item.
:param item | <XGanttWidgetItem>
"""
super(XGanttWidgetItem, self).addChild(item)
item.sync()
def addDependency(self, item):
"""
Creates a dependency for this item to the next item. This item will
be treated as the source, the other as the target.
:param item | <QGanttWidgetItem>
"""
if item in self._dependencies:
return
viewItem = XGanttDepItem(self, item)
self._dependencies[item] = viewItem
item._reverseDependencies[self] = viewItem
self.syncDependencies()
def adjustmentsBlocked(self, key):
"""
Returns whether or not hierarchy adjustments are being blocked.
:param key | <str>
:return <bool>
"""
return self._blockedAdjustments.get(nativestring(key), False)
def adjustChildren(self, delta, secs=False):
"""
Shifts the children for this item by the inputed number of days.
:param delta | <int>
"""
if self.adjustmentsBlocked('children'):
return
if self.itemStyle() != self.ItemStyle.Group:
return
if not delta:
return
for c in range(self.childCount()):
child = self.child(c)
child.blockAdjustments('range', True)
if secs:
dstart = child.dateTimeStart()
dstart = dstart.addSecs(delta)
child.setDateStart(dstart.date())
child.setTimeStart(dstart.time())
else:
child.setDateStart(child.dateStart().addDays(delta))
child.blockAdjustments('range', False)
def adjustRange(self, recursive=True):
"""
Adjust the start and end ranges for this item based on the limits from
its children. This method will only apply to group items.
:param recursive | <bool>
"""
if ( self.adjustmentsBlocked('range') ):
return
if ( self.itemStyle() == self.ItemStyle.Group ):
dateStart = self.dateStart()
dateEnd = self.dateEnd()
first = True
for c in range(self.childCount()):
child = self.child(c)
if ( first ):
dateStart = child.dateStart()
dateEnd = child.dateEnd()
first = False
else:
dateStart = min(child.dateStart(), dateStart)
dateEnd = max(child.dateEnd(), dateEnd)
self._dateStart = dateStart
self._dateEnd = dateEnd
self.sync()
if ( self.parent() and recursive ):
self.parent().adjustRange(True)
def blockAdjustments(self, key, state):
"""
Blocks the inputed adjustments for the given key type.
:param key | <str>
state | <bool>
"""
self._blockedAdjustments[nativestring(key)] = state
def clearDependencies(self):
"""
Clears out all the dependencies from the scene.
"""
gantt = self.ganttWidget()
if ( not gantt ):
return
scene = gantt.viewWidget().scene()
for target, viewItem in self._dependencies.items():
target._reverseDependencies.pop(self)
scene.removeItem(viewItem)
self._dependencies.clear()
def createViewItem(self):
"""
Returns a new XGanttViewItem to use with this item.
:return <XGanttViewItem>
"""
return XGanttViewItem(self)
def dateEnd(self):
"""
Return the end date for this gantt item.
:return <QDate>
"""
return self._dateEnd
def dateStart(self):
"""
Return the start date for this gantt item.
:return <QDate>
"""
return self._dateStart
def dateTimeEnd(self):
"""
Returns a merging of data from the date end with the time end.
:return <QDateTime>
"""
return QDateTime(self.dateEnd(), self.timeEnd())
def dateTimeStart(self):
"""
Returns a merging of data from the date end with the date start.
:return <QDateTime>
"""
return QDateTime(self.dateStart(), self.timeStart())
def dependencies(self):
"""
Returns a list of all the dependencies linked with this item.
:return [<XGanttWidgetItem>, ..]
"""
return self._dependencies.keys()
def duration(self):
"""
Returns the number of days this gantt item represents.
:return <int>
"""
return 1 + self.dateStart().daysTo(self.dateEnd())
def ganttWidget(self):
"""
Returns the gantt widget that this item is linked to.
:return <XGanttWidget> || None
"""
tree = self.treeWidget()
if ( not tree ):
return None
from projexui.widgets.xganttwidget import XGanttWidget
return projexui.ancestor(tree, XGanttWidget)
def insertChild(self, index, item):
"""
Inserts a new item in the given index.
:param index | <int>
item | <XGanttWidgetItem>
"""
super(XGanttWidgetItem, self).insertChild(index, item)
item.sync()
def isAllDay(self):
"""
Returns whehter or not this item reflects an all day event.
:return <bool>
"""
return self._allDay
def itemStyle(self):
"""
Returns the item style information for this item.
:return <XGanttWidgetItem.ItemStyle>
"""
if ( self.useGroupStyleWithChildren() and self.childCount() ):
return XGanttWidgetItem.ItemStyle.Group
return self._itemStyle
def name(self):
"""
Returns the name for this gantt widget item.
:return <str>
"""
return self._name
def property(self, key, default=None):
"""
Returns the custom data that is stored on this object.
:param key | <str>
default | <variant>
:return <variant>
"""
if key == 'Name':
return self.name()
elif key == 'Start':
return self.dateStart()
elif key == 'End':
return self.dateEnd()
elif key == 'Calendar Days':
return self.duration()
elif key == 'Work Days':
return self.weekdays()
elif key == 'Time Start':
return self.timeStart()
elif key == 'Time End':
return self.timeEnd()
elif key == 'All Day':
return self.isAllDay()
else:
return self._properties.get(nativestring(key), default)
def removeFromScene(self):
"""
Removes this item from the view scene.
"""
gantt = self.ganttWidget()
if not gantt:
return
scene = gantt.viewWidget().scene()
scene.removeItem(self.viewItem())
for target, viewItem in self._dependencies.items():
target._reverseDependencies.pop(self)
scene.removeItem(viewItem)
def setAllDay(self, state):
"""
Sets whether or not this item is an all day event.
:param state | <bool>
"""
self._allDay = state
def setDateEnd(self, date):
"""
Sets the date start value for this item.
:param dateStart | <QDate>
"""
self._dateEnd = date
def setDateStart(self, date):
"""
Sets the date start value for this item.
:param dateStart | <QDate>
"""
self._dateStart = date
def setDateTimeEnd(self, dtime):
"""
Sets the endiing date time for this gantt chart.
:param dtime | <QDateTime>
"""
self._dateEnd = dtime.date()
self._timeEnd = dtime.time()
self._allDay = False
def setDateTimeStart(self, dtime):
"""
Sets the starting date time for this gantt chart.
:param dtime | <QDateTime>
"""
self._dateStart = dtime.date()
self._timeStart = dtime.time()
self._allDay = False
def setDuration(self, duration):
"""
Sets the duration for this item to the inputed duration.
:param duration | <int>
"""
if duration < 1:
return False
self.setDateEnd(self.dateStart().addDays(duration - 1))
return True
def setItemStyle(self, itemStyle):
"""
Sets the item style that will be used for this widget. If you are
trying to set a style on an item that has children, make sure to turn
off the useGroupStyleWithChildren option, or it will always display as
a group.
:param itemStyle | <XGanttWidgetItem.ItemStyle>
"""
self._itemStyle = itemStyle
# initialize the group icon for group style
if itemStyle == XGanttWidgetItem.ItemStyle.Group and \
self.icon(0).isNull():
ico = projexui.resources.find('img/folder_close.png')
expand_ico = projexui.resources.find('img/folder_open.png')
self.setIcon(0, QIcon(ico))
self.setExpandedIcon(0, QIcon(expand_ico))
def setName(self, name):
"""
Sets the name of this widget item to the inputed name.
:param name | <str>
"""
self._name = name
tree = self.treeWidget()
if tree:
col = tree.column('Name')
if col != -1:
self.setData(col, Qt.EditRole, wrapVariant(name))
def setProperty(self, key, value):
"""
Sets the custom property for this item's key to the inputed value. If
the widget has a column that matches the inputed key, then the value
will be added to the tree widget as well.
:param key | <str>
value | <variant>
"""
if key == 'Name':
self.setName(value)
elif key == 'Start':
self.setDateStart(value)
elif key == 'End':
self.setDateEnd(value)
elif key == 'Calendar Days':
self.setDuration(value)
elif key == 'Time Start':
self.setTimeStart(value)
elif key == 'Time End':
self.setTimeEnd(value)
elif key == 'All Day':
self.setAllDay(value)
elif key == 'Workadys':
pass
else:
self._properties[nativestring(key)] = value
tree = self.treeWidget()
if tree:
col = tree.column(key)
if col != -1:
self.setData(col, Qt.EditRole, wrapVariant(value))
def setTimeEnd(self, time):
"""
Sets the ending time that this item will use. To properly use a timed
item, you need to also set this item's all day property to False.
:sa setAllDay
:param time | <QTime>
"""
self._timeEnd = time
self._allDay = False
def setTimeStart(self, time):
"""
Sets the starting time that this item will use. To properly use a timed
item, you need to also set this item's all day property to False.
:sa setAllDay
:param time | <QTime>
"""
self._timeStart = time
self._allDay = False
def setUseGroupStyleWithChildren(self, state):
"""
Sets whether or not this item should display as group style when
it has children. This will override whatever is set in the style
property for the item.
:return <bool>
"""
self._useGroupStyleWithChildren = state
def sync(self, recursive=False):
"""
Syncs the information from this item to the tree and view.
"""
self.syncTree(recursive=recursive)
self.syncView(recursive=recursive)
def syncDependencies(self, recursive=False):
"""
Syncs the dependencies for this item to the view.
:param recurisve | <bool>
"""
scene = self.viewItem().scene()
if not scene:
return
visible = self.viewItem().isVisible()
depViewItems = self._dependencies.values()
depViewItems += self._reverseDependencies.values()
for depViewItem in depViewItems:
if not depViewItem.scene():
scene.addItem(depViewItem)
depViewItem.rebuild()
depViewItem.setVisible(visible)
if recursive:
for c in range(self.childCount()):
self.child(c).syncDependencies(recursive = True)
def syncTree(self, recursive=False, blockSignals=True):
"""
Syncs the information from this item to the tree.
"""
tree = self.treeWidget()
# sync the tree information
if not tree:
return
items = [self]
if recursive:
items += list(self.children(recursive=True))
if blockSignals and not tree.signalsBlocked():
blocked = True
tree.blockSignals(True)
else:
blocked = False
date_format = self.ganttWidget().dateFormat()
for item in items:
for c, col in enumerate(tree.columns()):
value = item.property(col, '')
item.setData(c, Qt.EditRole, wrapVariant(value))
if blocked:
tree.blockSignals(False)
def syncView(self, recursive=False):
"""
Syncs the information from this item to the view.
"""
# update the view widget
gantt = self.ganttWidget()
tree = self.treeWidget()
if not gantt:
return
vwidget = gantt.viewWidget()
scene = vwidget.scene()
cell_w = gantt.cellWidth()
tree_offset_y = tree.header().height() + 1
tree_offset_y += tree.verticalScrollBar().value()
# collect the items to work on
items = [self]
if recursive:
items += list(self.children(recursive=True))
for item in items:
# grab the view item from the gantt item
vitem = item.viewItem()
if not vitem.scene():
scene.addItem(vitem)
# make sure the item should be visible
if item.isHidden() or not tree:
vitem.hide()
continue
vitem.show()
tree_rect = tree.visualItemRect(item)
tree_y = tree_rect.y() + tree_offset_y
tree_h = tree_rect.height()
# check to see if this item is hidden
if tree_rect.height() == 0:
vitem.hide()
continue
if gantt.timescale() in (gantt.Timescale.Minute,
gantt.Timescale.Hour,
gantt.Timescale.Day):
dstart = item.dateTimeStart()
dend = item.dateTimeEnd()
view_x = scene.datetimeXPos(dstart)
view_r = scene.datetimeXPos(dend)
view_w = view_r - view_x
else:
view_x = scene.dateXPos(item.dateStart())
view_w = item.duration() * cell_w
# determine the % off from the length based on this items time
if not item.isAllDay():
full_day = 24 * 60 * 60 # full days worth of seconds
# determine the start offset
start = item.timeStart()
start_day = (start.hour() * 60 * 60)
start_day += (start.minute() * 60)
start_day += (start.second())
offset_start = (start_day / float(full_day)) * cell_w
# determine the end offset
end = item.timeEnd()
end_day = (end.hour() * 60 * 60)
end_day += (start.minute() * 60)
end_day += (start.second() + 1) # forces at least 1 sec
offset_end = ((full_day - end_day) / float(full_day))
offset_end *= cell_w
# update the xpos and widths
view_x += offset_start
view_w -= (offset_start + offset_end)
view_w = max(view_w, 5)
vitem.setSyncing(True)
vitem.setPos(view_x, tree_y)
vitem.setRect(0, 0, view_w, tree_h)
vitem.setSyncing(False)
# setup standard properties
flags = vitem.ItemIsSelectable
flags |= vitem.ItemIsFocusable
if item.flags() & Qt.ItemIsEditable:
flags |= vitem.ItemIsMovable
vitem.setFlags(flags)
item.syncDependencies()
def takeChild(self, index):
"""
Removes the child at the given index from this item.
:param index | <int>
"""
item = super(XGanttWidgetItem, self).takeChild(index)
if item:
item.removeFromScene()
return item
def takeDependency(self, item):
"""
Removes the dependency between the this item and the inputed target.
:param item | <XGanttWidgetItem>
"""
if ( not item in self._dependencies ):
return
item._reverseDependencies.pop(self)
viewItem = self._dependencies.pop(item)
scene = viewItem.scene()
if ( scene ):
scene.removeItem(viewItem)
def timeEnd(self):
"""
Returns the ending time that will be used for this item. If it is an
all day event, then the time returned will be 23:59:59.
:return <QTime>
"""
if ( self.isAllDay() ):
return QTime(23, 59, 59)
return self._timeEnd
def timeStart(self):
"""
Returns the starting time that will be used for this item. If it is
an all day event, then the time returned will be 0:0:0
:return <QTime>
"""
if ( self.isAllDay() ):
return QTime(0, 0, 0)
return self._timeStart
def useGroupStyleWithChildren(self):
"""
Returns whether or not this item should display as group style when
it has children. This will override whatever is set in the style
property for the item.
:return <bool>
"""
return self._useGroupStyleWithChildren
def viewChanged(self, start, end):
"""
Called when the view item is changed by the user.
:param start | <QDate> || <QDateTime>
end | <QDate> || <QDateTime>
"""
if type(start) == QDate:
delta = self._dateStart.daysTo(start)
self._dateStart = start
self._dateEnd = end
self.adjustChildren(delta)
else:
delta = self._dateStart.secsTo(start)
self._dateStart = start.date()
self._timeStart = start.time()
self._dateEnd = end.date()
self._timeEnd = end.time()
self.adjustChildren(delta, secs=True)
self.adjustRange()
self.syncDependencies()
self.syncTree()
def viewItem(self):
"""
Returns the view item that is linked with this item.
:return <XGanttViewItem>
"""
if type(self._viewItem).__name__ == 'weakref':
return self._viewItem()
return self._viewItem
def weekdays(self):
"""
Returns the number of weekdays this item has.
:return <int>
"""
if self.itemStyle() == self.ItemStyle.Group:
out = 0
for i in range(self.childCount()):
out += self.child(i).weekdays()
return out
else:
dstart = self.dateStart().toPyDate()
dend = self.dateEnd().toPyDate()
return projex.dates.weekdays(dstart, dend) | lgpl-3.0 | -9,046,884,588,321,437,000 | 30.219512 | 80 | 0.485428 | false | 4.637271 | true | false | false |
studyxiao/blog-site | user/views.py | 1 | 1801 | from django.shortcuts import render
from django.contrib.auth import authenticate, login
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth import logout
from .forms import UserForm, UserProfileForm
def register(request):
registered = False
if request.method == 'POST':
user_form = UserForm(data=request.POST)
profile_form = UserProfileForm(data=request.POST)
if user_form.is_valid() and profile_form.is_valid():
user = user_form.save()
user.set_password(user.password)
user.save()
profile = profile_form.save(commit=False)
profile.user = user
if 'portrait' in request.FILES:
profile.portrait = request.FILES['portrait']
profile.save()
registered = True
else:
print(user_form.errors)
print(profile_form.errors)
else:
user_form = UserForm()
profile_form = UserProfileForm()
return render(request,'user/register.html',
dict(user_form=user_form, profile_form=profile_form, registered=registered))
def user_login(request):
if request.method == 'POST':
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
return HttpResponse('账户没有激活')
else:
return HttpResponse('账户信息错误')
else:
return render(request,'user/login.html', {})
def user_logout(request):
logout(request)
return HttpResponseRedirect('/')
| apache-2.0 | 3,434,745,878,098,250,000 | 29.118644 | 94 | 0.613393 | false | 4.355392 | false | false | false |
jgliss/geonum | geonum/topoaccessbase.py | 1 | 22012 | # -*- coding: utf-8 -*-
#
# Geonum is a Python library for geographical calculations in 3D
# Copyright (C) 2017 Jonas Gliss (jonasgliss@gmail.com)
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License a
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Access and handling of topographic data
"""
import numpy as np
import os
from warnings import warn
from six import with_metaclass
import srtm
import abc
from geonum import NETCDF_AVAILABLE, LOCAL_TOPO_DIR
from geonum.exceptions import (TopoAccessError, SRTMNotCoveredError)
class TopoAccessBase(with_metaclass(abc.ABCMeta, object)):
"""Abstract base class for topgraphy file implementations
Defines minimum interface for derived access classes of different
topographic datasets.
"""
#local_path = None
#topo_id = None
#: A coordinate for which data should be available
_TESTLAT = 45
_TESTLON = 15
def __init__(self, local_path=None, check_access=True):
self.local_path = local_path
self.topo_id = None
if check_access:
self.check_access()
@abc.abstractmethod
def get_data(self, lat0, lon0, lat1=None, lon1=None):
"""Declaration of data access method
It is obligatory to implement this method into derived classes.
Parameters
----------
lat0 : float
first latitude coordinate of topographic range (lower left coord)
lon0 : float
first longitude coordinate of topographic range (lower left coord)
lat1 : int or float, optional
second latitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
lon1 : int or float, optional
second longitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
Returns
-------
TopoData
instance of TopoData class
"""
pass
def check_access(self):
"""Check if topography data can be accessed"""
from geonum.topodata import TopoData
try:
d = self.get_data(self._TESTLAT, self._TESTLON)
if not isinstance(d, TopoData):
raise ValueError('Invalid return type, expected instance '
'of TopoData class, got {}'.format(type(d)))
return True
except Exception as e:
print('Could not access topodata: {}'.format(repr(e)))
return False
def _prep_borders(self, lat0, lon0, lat1, lon1):
"""Sort by longitudes and determines LL and TR coordinates
Parameters
----------
lat0 : float
first latitude coordinate of topographic range (lower left coord)
lon0 : float
first longitude coordinate of topographic range (lower left coord)
lat1 : float
second latitude coordinate of topographic range (upper right coord)
lon1 : float
second longitude coordinate of topographic range (upper right coord)
Returns
-------
tuple
4-element tuple, containing:
- float, smallest latitude (LL corner)
- float, smallest longitude (LL corner)
- float, largest latitude (TR corner)
- float, largest longitude (TR corner)
"""
lats, lons = np.asarray([lat0, lat1]), np.asarray([lon0, lon1])
return (np.nanmin(lats), np.nanmin(lons),
np.nanmax(lats), np.nanmax(lons))
def _init_lons_lats(self, lats_all, lons_all, lat0, lon0, lat1=None,
lon1=None):
"""Get all latitudes and longitudes on a topodata grid
Parameters
----------
lats_all : ndarray
numpy array containing available latitudes of the accessed topo
dataset
lons_all : ndarray
numpy array containing available longitudes of the accessed topo
dataset
lat0 : float
first latitude coordinate of topographic range (lower left coord)
lon0 : float
first longitude coordinate of topographic range (lower left coord)
lat1 : float, optional
second latitude coordinate of topographic range (upper right coord)
lon1 : float, optional
second longitude coordinate of topographic range (upper right coord)
Returns
-------
tuple
2-element tuple, containing
- ndarray, topodata latitudes overlapping with input range
- ndarray, topodata longitudes overlapping with input range
"""
if any([x is None for x in [lat1, lon1]]):
lat1, lon1 = lat0, lon0
if lon0 > lon1:
lon0, lon1 = lon1, lon0
lat0, lat1 = lat1, lat0
#print lat0, lon0, lat1, lon1
#closest indices
idx_lons = [np.argmin(abs(lons_all - lon0)),
np.argmin(abs(lons_all - lon1))]
idx_lats = [np.argmin(abs(lats_all - lat0)),
np.argmin(abs(lats_all - lat1))]
#Make sure that the retrieved indices actually INCLUDE the input ranges
if idx_lons[0] == 0 and lons_all[0] > lon0:
warn("Error: Lon0 smaller than range covered by file, using first"
" available index in topodata..")
lon0 = lons_all[0]
idx_lons[0] = 0
elif lons_all[idx_lons[0]] > lon0:
idx_lons[0] -= 1
if idx_lons[1] == len(lons_all) - 1 and lons_all[-1] < lon1:
warn("Error: Lon1 larger than range covered by file, using last"
" available index in topodata..")
lon1 = lons_all[-1]
idx_lons[1] = len(lons_all) - 1
elif lons_all[idx_lons[1]] < lon1:
idx_lons[1] += 1
if idx_lats[0] == 0 and lats_all[0] > lat0:
warn("Error: Lat0 smaller than range covered by file, using first"
" available index in topodata..")
lat0 = lats_all[0]
idx_lats[0] = 0
elif lats_all[idx_lats[0]] > lat0:
idx_lats[0] -= 1
if idx_lats[1] == len(lats_all) - 1 and lats_all[-1] < lat1:
warn("Error: Lat1 larger than range covered by file, using last"
" available index in topodata..")
lat1 = lats_all[-1]
idx_lats[1] = len(lats_all) - 1
elif lats_all[idx_lats[1]] < lat1:
idx_lats[1] += 1
#make sure that no odd array lengths occur
if not (idx_lats[1] - idx_lats[0] + 1) %2 == 0:
#try append index at the end
if not idx_lats[1] == len(lats_all) - 1:
idx_lats[1] += 1
elif not idx_lats[0] == 0:
idx_lats[0] -= 1
else:
raise ValueError("Fatal error, odd length of latitude array")
if not (idx_lons[1] - idx_lons[0] + 1) %2 == 0:
#try append index at the end
if not idx_lons[1] == len(lons_all) - 1:
idx_lons[1] += 1
elif not idx_lons[0] == 0:
idx_lons[0] -= 1
else:
raise ValueError("Fatal error, odd length of longitude array")
if idx_lats[0] > idx_lats[1]:
return (lats_all[idx_lats[1] : idx_lats[0] + 1],
lons_all[idx_lons[0] : idx_lons[1] + 1],
idx_lats, idx_lons)
else:
return (lats_all[idx_lats[0] : idx_lats[1] + 1],
lons_all[idx_lons[0] : idx_lons[1] + 1],
idx_lats, idx_lons)
class Etopo1Access(TopoAccessBase):
"""A class representing netCDF4 data access of Etopo1 data
See `here <https://github.com/jgliss/geonum#supported-etopo1-files>`_ for
instructions on the data access.
Attributes
----------
loader
data loader (:class:`netCDF4.Dataset`)
local_path : str
directory where Etopo1 data files are stored
file_name : str
file name of etopo data file
Parameters
----------
local_path : str
directory where Etopo1 data files are stored
file_name : str
file name of etopo data file
check_access : bool
if True, then access to topography data is checked on init and an
error is raised if no dataset can be accessed
search_database : bool
if True and topodata file :attr:`file_path` does not exist, then
a valid topography file is searched in all paths that are specified
in file `~/.geonum/LOCAL_TOPO_PATHS`.
Raises
------
TopoAccessError
if input arg `check_access` is True and if no supported data file
can be found
"""
#: ID of dataset
topo_id = "etopo1"
#: filenames of supported topographic datasets in preferred order
supported_topo_files = ["ETOPO1_Ice_g_gmt4.grd",
"ETOPO1_Bed_g_gmt4.grd"]
def __init__(self, local_path=None, file_name=None, check_access=False,
search_database=True):
if not NETCDF_AVAILABLE:
raise ModuleNotFoundError("Etopo1Access class cannot be initiated. "
"Please install netCDF4 library first")
self._local_path = LOCAL_TOPO_DIR
self._file_name = "ETOPO1_Ice_g_gmt4.grd"
from netCDF4 import Dataset
self.loader = Dataset
if file_name is not None:
self.file_name = file_name
if not os.path.exists(self.file_path) and search_database:
self.search_topo_file_database()
# check if file exists
if check_access:
if not os.path.exists(self.file_path):
raise TopoAccessError('File {} could not be found in local '
'topo directory: {}'.format(self.file_name,
self.local_path))
elif not self.check_access():
raise TopoAccessError('Failed to extract topography data for '
'Etopo dataset')
@property
def local_path(self):
"""Directory containing ETOPO1 gridded data files"""
return self._local_path
@local_path.setter
def local_path(self, val):
if not os.path.exists(val) or not os.path.isdir(val):
raise ValueError(f'Input directory {val} does not exist or is not '
f'a directory...')
self._check_topo_path(val)
self._local_path = val
@property
def file_name(self):
"""File name of topographic dataset used"""
return self._file_name
@file_name.setter
def file_name(self, val):
if not val in self.supported_topo_files:
raise ValueError(
f'Invalid file name for Etopo1 dataset {val}. Valid filenames '
f'are: {self.supported_topo_files}')
self._file_name = val
@property
def file_path(self):
"""Return full file path of current topography file"""
return os.path.join(self.local_path, self.file_name)
@file_path.setter
def file_path(self, val):
self.set_file_location(val)
def _check_topo_path(self, path):
"""Check if path exists and if it is already included in database
Parameters
----------
path : str
path to be checked
"""
from geonum.helpers import check_and_add_topodir
check_and_add_topodir(path)
def _get_all_local_topo_paths(self):
"""Get all search paths for topography files"""
from geonum.helpers import all_topodata_search_dirs
return all_topodata_search_dirs()
def _search_topo_file(self, path=None):
"""Checks if a valid etopo data file can be found in local folder
Searches in ``self.local_path`` for any of the file names specified
in ``supported_topo_files``
"""
if path is None:
path = self.local_path
print(f'Searching valid topo file in folder: {path}')
fnames = os.listdir(path)
for name in fnames:
if name in self.supported_topo_files:
self.file_name = name
self.local_path = path
print(("Found match, setting current filepath: %s"
%self.file_path))
return True
return False
def _find_supported_files(self):
"""Look for all supported files in ``self.local_path```and return list"""
files = os.listdir(self.local_path)
lst = []
for name in files:
if name in self.supported_topo_files:
lst.append(name)
return lst
def search_topo_file_database(self):
"""Checks if a valid topo file can be found in database"""
all_paths = self._get_all_local_topo_paths()
for path in all_paths:
if self._search_topo_file(path):
return True
return False
def set_file_location(self, full_path):
"""Set the full file path of a topography data file
Parameters
----------
full_path : str
full file path of topography file
Raises
------
TopoAccessError
if filepath does not exist or if the provided file is not
supported by this interface.
"""
if not os.path.exists(full_path):
raise TopoAccessError('Input file location %s does not exist'
.format(full_path))
_dir = os.path.dirname(full_path)
_f = os.path.basename(full_path)
if not _f in self.supported_topo_files:
raise TopoAccessError('Invalid topography data file name, please '
'use either of the supported files from the '
'Etopo1 data set: {}'
.format(self.supported_topo_files))
self.local_path = _dir
self.file_name = _f
if not os.path.basename(full_path) in self.supported_topo_files:
raise TopoAccessError("Invalid topography data file, please use "
"one of the supported files from the Etopo1 data set\n%s"
%self.supported_topo_files)
self.local_path = os.path.dirname(full_path)
self.file_name = os.path.basename(full_path)
def get_data(self, lat0, lon0, lat1=None, lon1=None):
"""Retrieve data from topography file
Parameters
----------
lat0 : float
first latitude coordinate of topographic range (lower left coord)
lon0 : float
first longitude coordinate of topographic range (lower left coord)
lat1 : int or float, optional
second latitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
lon1 : int or float, optional
second longitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
Returns
-------
TopoData
instance of TopoData class
"""
from geonum import TopoData
etopo1 = self.loader(self.file_path)
lons = etopo1.variables["x"][:]
lats = etopo1.variables["y"][:]
lats, lons, idx_lats, idx_lons = self._init_lons_lats(lats, lons, lat0,
lon0, lat1, lon1)
vals = np.asarray(etopo1.variables["z"][idx_lats[0] : idx_lats[1] + 1,
idx_lons[0] : idx_lons[1] + 1],
dtype = float)
etopo1.close()
return TopoData(lats, lons, vals, data_id=self.topo_id)
class SRTMAccess(TopoAccessBase):
"""Class for SRTM topographic data access
Uses library `srtm.py <https://pypi.python.org/pypi/SRTM.py/0.3.1>`_
for online access of data.
Note
----
:mod:`srtm.py` downloads the topo data from `this source <http://
dds.cr.usgs.gov/srtm/version2_1/>`_ and stores a copy of the unzipped data
files in the current cache directory found in home.
Whenever data access is requested, the :mod:`srtm.py` checks if the file
already exists on the local machine and if not downloads it online. The
online access is rather slow, so do not be surprised, if things take a
while when accessing a specific location for the first time.
**Deleting cached SRTM files**:
use :func:`geonum.topoaccess.delete_all_local_srtm_files`
Parameters
----------
check_access : bool
check if data can be accessed on class initialisation
**kwargs
additional keyword arguments that are passed through (irrelevant for
this class but relevant for factory loader class
:class:`TopoDataAccess`, particularly :func:`set_mode` therein.
"""
def __init__(self, check_access=False, **kwargs):
"""Class initialisation"""
self.loader = srtm
self.topo_id = "srtm"
if check_access:
self.check_access()
def _coordinate_covered(self, access_obj, lat, lon):
"""Checks if SRTM data is available for input coordinate
Parameters
----------
access_obj : GeoElevationData
data access object from :mod:`srtm.py` module
(can be created calling ``srtm.get_data()``)
lat : float
latitude of point
lon : float
longitude of point
Returns
-------
bool
True, if SRTM data is available for coordinate, else False.
"""
if access_obj.get_file_name(lat, lon) is None:
return False
return True
def get_data(self, lat0, lon0, lat1=None, lon1=None):
"""Load SRTM topographic subset for input range
Parameters
----------
lat0 : float
first latitude coordinate of topographic range (lower left coord)
lon0 : float
first longitude coordinate of topographic range (lower left coord)
lat1 : int or float, optional
second latitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
lon1 : int or float, optional
second longitude coordinate of topographic range (upper right
coord). If None only data around lon0, lat0 will be extracted.
Returns
-------
TopoData
instance of TopoData class
"""
from geonum import TopoData
print("Retrieving SRTM data (this might take a while) ... ")
# create GeoElevationData object for data access
dat = self.loader.get_data()
# check if second input point is specified and set equal first point if
# not
if any([x is None for x in [lat1, lon1]]):
lat1, lon1 = lat0, lon0
# Check if first point is covered by dataset
if not self._coordinate_covered(dat, lat0, lon0):
raise SRTMNotCoveredError('Point (lat={:.2f}, lon={:.2f}) not '
'covered by SRTM'.format(lat0, lon0))
# check if second point is covered by dataset
if not self._coordinate_covered(dat, lat1, lon1):
raise SRTMNotCoveredError('Endpoint coordinate (lat={:.2f}, '
'lon={:.2f}) not covered by SRTM'
.format(lat1, lon1))
# prepare borders of covered lon / lat regime
lat_ll, lon_ll, lat_tr,lon_tr = self._prep_borders(lat0, lon0,
lat1, lon1)
# get SRTM file for lower left corner of regime
f_ll = dat.get_file(lat_ll, lon_ll)
# get SRTM file for top right corner of regime
f_tr = dat.get_file(lat_tr, lon_tr)
# create array of longitude values for regime
lons_all = np.linspace(f_ll.longitude, f_tr.longitude + 1,
f_ll.square_side)
# create array of latitude values for regime
lats_all = np.linspace(f_ll.latitude, f_tr.latitude + 1,
f_ll.square_side)
#prepare coordinates
lats, lons, _, _= self._init_lons_lats(lats_all, lons_all,
lat0, lon0, lat1, lon1)
# Init data array
vals = np.ones((len(lats), len(lons))) * np.nan
        # loop over all coordinates and try to access the elevation data
for i in range(len(lats)):
for j in range(len(lons)):
#print "Lat: %s, Lon: %s" % (lats[i], lons[j])
vals[i, j] = dat.get_elevation(lats[i], lons[j])
return TopoData(lats, lons, vals, data_id=self.topo_id)
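# Illustrative usage sketch for the class above (not part of the original
# module; the coordinates are arbitrary). The first call for a region
# downloads the SRTM tile via srtm.py, later calls reuse the local cache:
#
#   acc = SRTMAccess()
#   topo = acc.get_data(47.0, 11.0, 47.1, 11.1)  # returns a TopoData subset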
def delete_all_local_srtm_files():
"""Deletes all locally stored SRTM files"""
import glob
from srtm.utils import FileHandler
fh = FileHandler()
for file in glob.glob(f'{fh.local_cache_dir}/*.hgt'):
print('Deleting SRTM data file at {}'.format(file))
os.remove(file)
if __name__ == '__main__':
delete_all_local_srtm_files()
ecc = Etopo1Access() | gpl-3.0 | -8,204,550,211,532,832,000 | 36.953448 | 81 | 0.574596 | false | 4.008742 | false | false | false |
algorhythms/LeetCode | 974 Subarray Sums Divisible by K.py | 1 | 1564 | #!/usr/bin/python3
"""
Given an array A of integers, return the number of (contiguous, non-empty)
subarrays that have a sum divisible by K.
Example 1:
Input: A = [4,5,0,-2,-3,1], K = 5
Output: 7
Explanation: There are 7 subarrays with a sum divisible by K = 5:
[4, 5, 0, -2, -3, 1], [5], [5, 0], [5, 0, -2, -3], [0], [0, -2, -3], [-2, -3]
Note:
1 <= A.length <= 30000
-10000 <= A[i] <= 10000
2 <= K <= 10000
"""
from typing import List
from collections import defaultdict
class Solution:
def subarraysDivByK_2(self, A: List[int], K: int) -> int:
"""
count the prefix sum mod K
nC2
"""
prefix_sum = 0
counter = defaultdict(int)
        counter[0] = 1  # important trivial case
for a in A:
prefix_sum += a
prefix_sum %= K
counter[prefix_sum] += 1
ret = 0
for v in counter.values():
ret += v * (v-1) // 2
return ret
def subarraysDivByK(self, A: List[int], K: int) -> int:
"""
Prefix sum
O(N^2)
How to optimize?
Mapping to prefix sum to count
Divide: Translate divisible by K into mod.
prefix sum has to be MOD by K.
"""
prefix_sum = 0
counter = defaultdict(int)
        counter[0] = 1  # trivial case. !important
ret = 0
for a in A:
prefix_sum += a
prefix_sum %= K
ret += counter[prefix_sum] # count of previously matching prefix sum
counter[prefix_sum] += 1
return ret
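# Minimal check (added for illustration, not in the original file): both
# variants should return 7 for the sample input from the problem statement.
if __name__ == "__main__":
    sol = Solution()
    assert sol.subarraysDivByK([4, 5, 0, -2, -3, 1], 5) == 7
    assert sol.subarraysDivByK_2([4, 5, 0, -2, -3, 1], 5) == 7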
| mit | 3,167,612,944,419,410,400 | 24.225806 | 81 | 0.523657 | false | 3.37797 | false | false | false |
OpenTrons/opentrons-api | robot-server/robot_server/robot/calibration/session.py | 2 | 10400 | import contextlib
import typing
from uuid import UUID, uuid4
from robot_server.robot.calibration.constants import (
TIP_RACK_LOOKUP_BY_MAX_VOL,
SHORT_TRASH_DECK,
STANDARD_DECK
)
from robot_server.robot.calibration.errors import CalibrationError
from robot_server.robot.calibration.helper_classes import PipetteInfo, \
PipetteRank, LabwareInfo, Moves, CheckMove
from opentrons.config import feature_flags as ff
from opentrons.hardware_control import ThreadManager, Pipette, CriticalPoint
from opentrons.hardware_control.util import plan_arc
from opentrons.protocol_api import labware
from opentrons.protocols.geometry import deck, planning
from opentrons.types import Mount, Point, Location
from robot_server.service.errors import RobotServerError
from .util import save_default_pick_up_current
class SessionManager:
"""Small wrapper to keep track of robot calibration sessions created."""
def __init__(self):
self._sessions = {}
@property
def sessions(self):
return self._sessions
# vector from front bottom left of slot 12
HEIGHT_SAFETY_BUFFER = Point(0, 0, 5.0)
class CalibrationSession:
"""Class that controls state of the current robot calibration session"""
def __init__(self, hardware: ThreadManager,
lights_on_before: bool = False):
self._hardware = hardware
self._lights_on_before = lights_on_before
deck_load_name = SHORT_TRASH_DECK if ff.short_fixed_trash() \
else STANDARD_DECK
self._deck = deck.Deck(load_name=deck_load_name)
self._pip_info_by_mount = self._get_pip_info_by_mount(
hardware.get_attached_instruments())
self._labware_info = self._determine_required_labware()
self._moves = self._build_deck_moves()
@classmethod
async def build(cls, hardware: ThreadManager):
lights_on = hardware.get_lights()['rails']
await hardware.cache_instruments()
await hardware.set_lights(rails=True)
await hardware.home()
return cls(hardware=hardware, lights_on_before=lights_on)
@staticmethod
def _get_pip_info_by_mount(
new_pipettes: typing.Dict[Mount, Pipette.DictType]) \
-> typing.Dict[Mount, PipetteInfo]:
pip_info_by_mount = {}
attached_pips = {m: p for m, p in new_pipettes.items() if p}
num_pips = len(attached_pips)
if num_pips > 0:
for mount, data in attached_pips.items():
if data:
rank = PipetteRank.first
if num_pips == 2 and mount == Mount.LEFT:
rank = PipetteRank.second
cp = None
if data['channels'] == 8:
cp = CriticalPoint.FRONT_NOZZLE
pip_info_by_mount[mount] = PipetteInfo(tiprack_id=None,
critical_point=cp,
rank=rank,
mount=mount)
return pip_info_by_mount
else:
raise RobotServerError(
definition=CalibrationError.NO_PIPETTE_ATTACHED,
flow='calibration check')
def _determine_required_labware(self) -> typing.Dict[UUID, LabwareInfo]:
"""
A function that inserts tiprack information into two dataclasses
:py:class:`.LabwareInfo` and :py:class:`.LabwareDefinition` based
on the current pipettes attached.
"""
lw: typing.Dict[UUID, LabwareInfo] = {}
_prev_lw_uuid: typing.Optional[UUID] = None
for mount, pip_info in self._pip_info_by_mount.items():
load_name: str = self._load_name_for_mount(mount)
prev_lw = lw.get(_prev_lw_uuid, None) if _prev_lw_uuid else None
if _prev_lw_uuid and prev_lw and prev_lw.loadName == load_name:
# pipette uses same tiprack as previous, use existing
lw[_prev_lw_uuid].forMounts.append(mount)
self._pip_info_by_mount[mount].tiprack_id = _prev_lw_uuid
else:
lw_def = labware.get_labware_definition(load_name)
new_uuid: UUID = uuid4()
_prev_lw_uuid = new_uuid
slot = self._get_tip_rack_slot_for_mount(mount)
lw[new_uuid] = LabwareInfo(
alternatives=self._alt_load_names_for_mount(mount),
forMounts=[mount],
loadName=load_name,
slot=slot,
namespace=lw_def['namespace'],
version=lw_def['version'],
id=new_uuid,
definition=lw_def)
self._pip_info_by_mount[mount].tiprack_id = new_uuid
return lw
def _alt_load_names_for_mount(self, mount: Mount) -> typing.List[str]:
pip_vol = self.pipettes[mount]['max_volume']
return list(TIP_RACK_LOOKUP_BY_MAX_VOL[str(pip_vol)].alternatives)
def _load_name_for_mount(self, mount: Mount) -> str:
pip_vol = self.pipettes[mount]['max_volume']
return TIP_RACK_LOOKUP_BY_MAX_VOL[str(pip_vol)].load_name
def _build_deck_moves(self) -> Moves:
return Moves(
joggingFirstPipetteToHeight=self._build_height_dict('5'),
joggingFirstPipetteToPointOne=self._build_cross_dict('1BLC'),
joggingFirstPipetteToPointTwo=self._build_cross_dict('3BRC'),
joggingFirstPipetteToPointThree=self._build_cross_dict('7TLC'),
joggingSecondPipetteToHeight=self._build_height_dict('5'),
joggingSecondPipetteToPointOne=self._build_cross_dict('1BLC'))
def _build_cross_dict(self, pos_id: str) -> CheckMove:
cross_coords = self._deck.get_calibration_position(pos_id).position
return CheckMove(position=Point(*cross_coords), locationId=uuid4())
def _build_height_dict(self, slot: str) -> CheckMove:
pos = self._deck.get_slot_center(slot)
ydim: float\
= self._deck.get_slot_definition(slot)['boundingBox']['yDimension']
# shift down to 10mm +y of the slot edge to both stay clear of the
# slot boundary, avoid the engraved slot number, and avoid the
# tiprack colliding if this is a multi
updated_pos = pos - Point(0, (ydim/2)-10, pos.z) + HEIGHT_SAFETY_BUFFER
return CheckMove(position=updated_pos, locationId=uuid4())
def _get_tip_rack_slot_for_mount(self, mount) -> str:
if len(self._pip_info_by_mount) == 2:
shared_tiprack = self._load_name_for_mount(Mount.LEFT) == \
self._load_name_for_mount(Mount.RIGHT)
if mount == Mount.LEFT and not shared_tiprack:
return '6'
else:
return '8'
else:
return '8'
async def _jog(self, mount: Mount, vector: Point):
"""
General function that can be used by all session types to jog around
a specified pipette.
"""
await self.hardware.move_rel(mount, vector)
async def _pick_up_tip(self, mount: Mount):
pip_info = self._pip_info_by_mount[mount]
instr = self._hardware._attached_instruments[mount]
if pip_info.tiprack_id:
lw_info = self.get_tiprack(pip_info.tiprack_id)
# Note: ABC DeckItem cannot have tiplength b/c of
# mod geometry contexts. Ignore type checking error here.
tiprack = self._deck[lw_info.slot]
full_length = tiprack.tip_length # type: ignore
overlap_dict: typing.Dict =\
self.pipettes[mount]['tip_overlap'] # type: ignore
default = overlap_dict['default']
overlap = overlap_dict.get(
tiprack.uri, # type: ignore
default)
tip_length = full_length - overlap
else:
tip_length = self.pipettes[mount]['fallback_tip_length']
with contextlib.ExitStack() as stack:
if pip_info.critical_point:
# If the pipette we're picking up tip for
# has a critical point, we know it is a multichannel
stack.enter_context(save_default_pick_up_current(instr))
await self.hardware.pick_up_tip(mount, tip_length)
async def _trash_tip(self, mount: Mount):
trash_lw = self._deck.get_fixed_trash()
assert trash_lw
to_loc = trash_lw.wells()[0].top()
await self._move(mount, to_loc, CriticalPoint.XY_CENTER)
await self._drop_tip(mount)
async def _drop_tip(self, mount: Mount):
await self.hardware.drop_tip(mount)
async def cache_instruments(self):
await self.hardware.cache_instruments()
new_dict = self._get_pip_info_by_mount(
self.hardware.get_attached_instruments())
self._pip_info_by_mount.clear()
self._pip_info_by_mount.update(new_dict)
@property
def hardware(self) -> ThreadManager:
return self._hardware
def get_tiprack(self, uuid: UUID) -> LabwareInfo:
return self._labware_info[uuid]
@property
def pipettes(self) -> typing.Dict[Mount, Pipette.DictType]:
return self.hardware.attached_instruments
@property
def labware_status(self) -> typing.Dict[UUID, LabwareInfo]:
"""
Public property to help format the current labware status of a given
session for the client.
"""
return self._labware_info
async def _move(self,
mount: Mount,
to_loc: Location,
cp_override: CriticalPoint = None):
from_pt = await self.hardware.gantry_position(mount)
from_loc = Location(from_pt, None)
cp = cp_override or self._pip_info_by_mount[mount].critical_point
max_height = self.hardware.get_instrument_max_height(mount)
safe = planning.safe_height(
from_loc, to_loc, self._deck, max_height)
moves = plan_arc(from_pt, to_loc.point, safe,
origin_cp=None,
dest_cp=cp)
for move in moves:
await self.hardware.move_to(
mount, move[0], critical_point=move[1])
| apache-2.0 | -5,948,873,792,394,349,000 | 40.935484 | 79 | 0.591442 | false | 3.80395 | false | false | false |
slipstream/SlipStreamConnectors | opennebula/python/tar/slipstream_opennebula/OpenNebulaClientCloud.py | 1 | 19621 | """
SlipStream Client
=====
Copyright (C) 2015 SixSq Sarl (sixsq.com)
=====
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import slipstream.util as util
import slipstream.exceptions.Exceptions as Exceptions
from slipstream.util import override
from slipstream.cloudconnectors.BaseCloudConnector import BaseCloudConnector
from slipstream.utils.ssh import generate_keypair
from slipstream.UserInfo import UserInfo
from slipstream.ConfigHolder import ConfigHolder
import os
import xmlrpclib
import ssl
import urllib
import re
import base64
try:
import xml.etree.cElementTree as eTree # c-version, faster
except ImportError:
import xml.etree.ElementTree as eTree # python version
def getConnector(config_holder):
return getConnectorClass()(config_holder)
def getConnectorClass():
return OpenNebulaClientCloud
def searchInObjectList(list_, property_name, property_value):
for element in list_:
if isinstance(element, dict):
if element.get(property_name) == property_value:
return element
else:
if getattr(element, property_name) == property_value:
return element
return None
def instantiate_from_cimi(cimi_connector, cimi_cloud_credential):
user_info = UserInfo(cimi_connector['instanceName'])
cloud_params = {
UserInfo.CLOUD_USERNAME_KEY: cimi_cloud_credential['key'],
UserInfo.CLOUD_PASSWORD_KEY: cimi_cloud_credential['secret'],
'endpoint': cimi_connector.get('endpoint')
}
user_info.set_cloud_params(cloud_params)
config_holder = ConfigHolder(options={'verboseLevel': 0, 'retry': False})
os.environ['SLIPSTREAM_CONNECTOR_INSTANCE'] = cimi_connector['instanceName']
connector_instance = OpenNebulaClientCloud(config_holder)
connector_instance._initialization(user_info)
return connector_instance
class OpenNebulaClientCloud(BaseCloudConnector):
VM_STATE = [
'Init', # 0
'Pending', # 1
'Hold', # 2
'Active', # 3
'Stopped', # 4
'Suspended', # 5
'Done', # 6
'//Failed', # 7
'Poweroff', # 8
'Undeployed' # 9
]
VM_LCM_STATE = [
'Lcm init', # 0
'Prolog', # 1
'Boot', # 2
'Running', # 3
'Migrate', # 4
'Save stop', # 5
'Save suspend', # 6
'Save migrate', # 7
'Prolog migrate', # 8
'Prolog resume', # 9
'Epilog stop', # 10
'Epilog', # 11
'Shutdown', # 12
'//Cancel', # 13
'//Failure', # 14
'Cleanup resubmit', # 15
'Unknown', # 16
'Hotplug', # 17
'Shutdown poweroff', # 18
'Boot unknown', # 19
'Boot poweroff', # 20
'Boot suspended', # 21
'Boot stopped', # 22
'Cleanup delete', # 23
'Hotplug snapshot', # 24
'Hotplug nic', # 25
'Hotplug saveas', # 26
'Hotplug saveas poweroff', # 27
'Hotplug saveas suspended', # 28
'Shutdown undeploy', # 29
'Epilog undeploy', # 30
'Prolog undeploy', # 31
'Boot undeploy', # 32
'Hotplug prolog poweroff', # 33
'Hotplug epilog poweroff', # 34
'Boot migrate', # 35
'Boot failure', # 36
'Boot migrate failure', # 37
'Prolog migrate failure', # 38
'Prolog failure', # 39
'Epilog failure', # 40
'Epilog stop failure', # 41
'Epilog undeploy failure', # 42
'Prolog migrate poweroff', # 43
'Prolog migrate poweroff failure', # 44
'Prolog migrate suspend', # 45
'Prolog migrate suspend failure', # 46
'Boot undeploy failure', # 47
'Boot stopped failure', # 48
'Prolog resume failure', # 49
'Prolog undeploy failure', # 50
'Disk snapshot poweroff', # 51
'Disk snapshot revert poweroff', # 52
'Disk snapshot delete poweroff', # 53
'Disk snapshot suspended', # 54
'Disk snapshot revert suspended', # 55
'Disk snapshot delete suspended', # 56
'Disk snapshot', # 57
'Disk snapshot revert', # 58
'Disk snapshot delete', # 59
'Prolog migrate unknown', # 60
'Prolog migrate unknown failure' # 61
]
IMAGE_STATE = [
'Init', # 0
'Ready', # 1
'Used', # 2
'Disabled', # 3
'Locked', # 4
'Error', # 5
'Clone', # 6
'Delete', # 7
'Used_pers' # 8
]
def _resize(self, node_instance):
raise Exceptions.ExecutionException(
'{0} doesn\'t implement resize feature.'.format(self.__class__.__name__))
def _detach_disk(self, node_instance):
raise Exceptions.ExecutionException(
'{0} doesn\'t implement detach disk feature.'.format(self.__class__.__name__))
def _attach_disk(self, node_instance):
raise Exceptions.ExecutionException(
'{0} doesn\'t implement attach disk feature.'.format(self.__class__.__name__))
cloudName = 'opennebula'
def __init__(self, config_holder):
super(OpenNebulaClientCloud, self).__init__(config_holder)
self._set_capabilities(contextualization=True,
direct_ip_assignment=True,
orchestrator_can_kill_itself_or_its_vapp=True)
self.user_info = None
def _rpc_execute(self, command, *args):
proxy = self._create_rpc_connection()
remote_function = getattr(proxy, command)
success, output_or_error_msg, err_code = \
remote_function(self._create_session_string(), *args)
if not success:
raise Exceptions.ExecutionException(output_or_error_msg)
return output_or_error_msg
@override
def _initialization(self, user_info, **kwargs):
util.printStep('Initialize the OpenNebula connector.')
self.user_info = user_info
if self.is_build_image():
self.tmp_private_key, self.tmp_public_key = generate_keypair()
self.user_info.set_private_key(self.tmp_private_key)
def format_instance_name(self, name):
new_name = self.remove_bad_char_in_instance_name(name)
return self.truncate_instance_name(new_name)
@staticmethod
def truncate_instance_name(name):
if len(name) <= 128:
return name
else:
return name[:63] + '-' + name[-63:]
@staticmethod
def remove_bad_char_in_instance_name(name):
return re.sub('[^a-zA-Z0-9-]', '', name)
def _set_instance_name(self, vm_name):
return 'NAME = {0}'.format(self.format_instance_name(vm_name))
def _set_disks(self, image_id, disk_size_gb):
try:
img_id = int(image_id)
except:
raise Exception('Something is wrong with image ID : {0}!'.format(image_id))
disk = 'IMAGE_ID = {0:d}'.format(img_id)
if disk_size_gb is None:
return 'DISK = [ {} ]'.format(disk)
else:
try:
disk_size_mb = int(float(disk_size_gb) * 1024)
except:
raise Exception('Something is wrong with root disk size : {0}!'.format(disk_size_gb))
return 'DISK = [ {0}, SIZE={1:d} ]'.format(disk, disk_size_mb)
def _set_additionnal_disks(self, disk_size_gb):
if disk_size_gb is None:
return ''
try:
disk_size_mb = int(float(disk_size_gb) * 1024)
except:
            raise Exception('Something is wrong with additional disk size : {0}!'.format(disk_size_gb))
return 'DISK = [ FORMAT = "ext4", SIZE="{0:d}", TYPE="fs", IO="native" ]'.format(disk_size_mb)
def _set_cpu(self, vm_vcpu, cpu_ratio):
try:
number_vcpu = int(vm_vcpu)
ratio = float(cpu_ratio)
except:
raise Exception('Something wrong with CPU size: cpu = {0} and cpu ratio = {1} !'.format(vm_vcpu, cpu_ratio))
return 'VCPU = {0:d} CPU = {1:f}'.format(number_vcpu, ratio)
def _set_ram(self, vm_ram_gbytes):
try:
ram = int(float(vm_ram_gbytes) * 1024)
except ValueError:
raise Exception('Something wrong with RAM size : {0}!'.format(vm_ram_gbytes))
return 'MEMORY = {0:d}'.format(ram)
def _set_nics(self, requested_network_type, public_network_id, private_network_id):
# extract mappings for Public and Private networks from the connector instance
if requested_network_type.upper() == 'PUBLIC':
try:
network_id = int(public_network_id)
except ValueError:
raise Exception('Something wrong with specified Public Network ID : {0}!'.format(public_network_id))
elif requested_network_type.upper() == 'PRIVATE':
try:
network_id = int(private_network_id)
except ValueError:
raise Exception('Something wrong with specified Private Network ID : {0}!'.format(private_network_id))
else:
return ''
return 'NIC = [ NETWORK_ID = {0:d}, MODEL = "virtio" ]'.format(network_id)
def _set_specific_nic(self, network_specific_name):
network_infos = network_specific_name.split(';')
nic = 'NETWORK = {0}, MODEL = "virtio"'.format(network_infos[0])
if len(network_infos) == 1:
return 'NIC = [ {} ]'.format(nic)
elif len(network_infos) == 2:
return 'NIC = [ {0}, NETWORK_UNAME = {1} ]'.format(nic, network_infos[1])
else:
raise Exception('Something wrong with specified Network name : {0}!'.format(network_specific_name))
def _set_contextualization(self, contextualization_type, public_ssh_key, contextualization_script):
if contextualization_type != 'cloud-init':
return 'CONTEXT = [ NETWORK = "YES", SSH_PUBLIC_KEY = "{0}", ' \
'START_SCRIPT_BASE64 = "{1}"]'.format(public_ssh_key, base64.b64encode(contextualization_script))
else:
return 'CONTEXT = [ PUBLIC_IP = "$NIC[IP]", SSH_PUBLIC_KEY = "{0}", USERDATA_ENCODING = "base64", ' \
'USER_DATA = "{1}"]'.format(public_ssh_key, base64.b64encode(contextualization_script))
@override
def _start_image(self, user_info, node_instance, vm_name):
return self._start_image_on_opennebula(user_info, node_instance, vm_name)
def _start_image_on_opennebula(self, user_info, node_instance, vm_name):
instance_name = self._set_instance_name(vm_name)
ram = self._set_ram(node_instance.get_ram())
cpu = self._set_cpu(node_instance.get_cpu(), user_info.get_cloud('cpuRatio'))
disks = self._set_disks(node_instance.get_image_id(), node_instance.get_root_disk_size())
additionnal_disks = self._set_additionnal_disks(node_instance.get_volatile_extra_disk_size())
try:
network_specific_name = node_instance.get_cloud_parameter('network.specific.name').strip()
except:
network_specific_name = ''
if network_specific_name:
nics = self._set_specific_nic(network_specific_name)
else:
nics = self._set_nics(node_instance.get_network_type(),
user_info.get_public_network_name(),
user_info.get_private_network_name())
if self.is_build_image():
context = self._set_contextualization(node_instance.get_cloud_parameter('contextualization.type'),
self.tmp_public_key, '')
else:
context = self._set_contextualization(node_instance.get_cloud_parameter('contextualization.type'),
self.user_info.get_public_keys(),
self._get_bootstrap_script(node_instance))
custom_vm_template = node_instance.get_cloud_parameter('custom.vm.template') or ''
template = ' '.join([instance_name, cpu, ram, disks, additionnal_disks, nics, context, custom_vm_template])
vm_id = self._rpc_execute('one.vm.allocate', template, False)
vm = self._rpc_execute('one.vm.info', vm_id)
return eTree.fromstring(vm)
@override
def list_instances(self):
vms = eTree.fromstring(self._rpc_execute('one.vmpool.info', -3, -1, -1, -1))
return vms.findall('VM')
@override
def _stop_deployment(self):
for _, vm in self.get_vms().items():
self._rpc_execute('one.vm.action', 'delete', int(vm.findtext('ID')))
@override
def _stop_vms_by_ids(self, ids):
for _id in map(int, ids):
self._rpc_execute('one.vm.action', 'delete', _id)
@override
def _build_image(self, user_info, node_instance):
return self._build_image_on_opennebula(user_info, node_instance)
def _build_image_on_opennebula(self, user_info, node_instance):
listener = self._get_listener()
machine_name = node_instance.get_name()
vm = self._get_vm(machine_name)
ip_address = self._vm_get_ip(vm)
vm_id = int(self._vm_get_id(vm))
self._wait_vm_in_state(vm_id, 'Active', time_out=300, time_sleep=10)
self._build_image_increment(user_info, node_instance, ip_address)
util.printStep('Creation of the new Image.')
self._rpc_execute('one.vm.action', 'poweroff', vm_id)
self._wait_vm_in_state(vm_id, 'Poweroff', time_out=300, time_sleep=10)
listener.write_for(machine_name, 'Saving the image')
new_image_name = node_instance.get_image_short_name() + time.strftime("_%Y%m%d-%H%M%S")
new_image_id = int(self._rpc_execute(
'one.vm.disksaveas', vm_id, 0, new_image_name, '', -1))
self._wait_image_in_state(new_image_id, 'Ready', time_out=1800, time_sleep=30)
listener.write_for(machine_name, 'Image saved !')
self._rpc_execute('one.vm.action', 'resume', vm_id)
self._wait_vm_in_state(vm_id, 'Active', time_out=300, time_sleep=10)
return str(new_image_id)
def _get_vm_state(self, vm_id):
vm = self._rpc_execute('one.vm.info', vm_id)
return int(eTree.fromstring(vm).findtext('STATE'))
def _wait_vm_in_state(self, vm_id, state, time_out, time_sleep=30):
time_stop = time.time() + time_out
current_state = self._get_vm_state(vm_id)
while current_state != self.VM_STATE.index(state):
if time.time() > time_stop:
raise Exceptions.ExecutionException(
'Timed out while waiting VM {0} to enter in state {1}'.format(vm_id, state))
time.sleep(time_sleep)
current_state = self._get_vm_state(vm_id)
return current_state
def _get_image_state(self, image_id):
image = self._rpc_execute('one.image.info', image_id)
return int(eTree.fromstring(image).findtext('STATE'))
def _wait_image_in_state(self, image_id, state, time_out, time_sleep=30):
time_stop = time.time() + time_out
current_state = self._get_image_state(image_id)
while current_state != self.IMAGE_STATE.index(state):
if time.time() > time_stop:
raise Exceptions.ExecutionException(
'Timed out while waiting for image {0} to be in state {1}'.format(image_id, state))
time.sleep(time_sleep)
current_state = self._get_image_state(image_id)
return current_state
def _wait_image_not_in_state(self, image_id, state, time_out, time_sleep=30):
time_stop = time.time() + time_out
current_state = self._get_image_state(image_id)
while current_state == self.IMAGE_STATE.index(state):
if time.time() > time_stop:
raise Exceptions.ExecutionException(
'Timed out while waiting for image {0} to be in state {1}'.format(image_id, state))
time.sleep(time_sleep)
current_state = self._get_image_state(image_id)
return current_state
def _create_session_string(self):
quoted_username = urllib.quote(self.user_info.get_cloud_username(), '')
quoted_password = urllib.quote(self.user_info.get_cloud_password(), '')
return '{0}:{1}'.format(quoted_username, quoted_password)
def _create_rpc_connection(self):
protocol_separator = '://'
parts = self.user_info.get_cloud_endpoint().split(protocol_separator)
url = parts[0] + protocol_separator + self._create_session_string() \
+ "@" + ''.join(parts[1:])
no_certif_check = hasattr(ssl, '_create_unverified_context') and ssl._create_unverified_context() or None
try:
return xmlrpclib.ServerProxy(url, context=no_certif_check)
except TypeError:
return xmlrpclib.ServerProxy(url)
@override
def _vm_get_ip(self, vm):
return vm.findtext('TEMPLATE/NIC/IP')
@override
def _vm_get_id(self, vm):
return vm.findtext('ID')
@override
def _vm_get_state(self, vm):
vm_state = int(vm.findtext('STATE'))
if vm_state == OpenNebulaClientCloud.VM_STATE.index('Active'):
return OpenNebulaClientCloud.VM_LCM_STATE[int(vm.findtext('LCM_STATE'))]
return OpenNebulaClientCloud.VM_STATE[vm_state]
@override
def _vm_get_id_from_list_instances(self, vm):
return self._vm_get_id(vm)
@override
def _vm_get_ip_from_list_instances(self, vm_instance):
return self._vm_get_ip(vm_instance)
@override
def _vm_get_cpu(self, vm_instance):
return vm_instance.findtext('TEMPLATE/VCPU')
@override
def _vm_get_ram(self, vm_instance):
return vm_instance.findtext('TEMPLATE/MEMORY')
@override
def _vm_get_root_disk(self, vm_instance):
return format(int(vm_instance.findtext('TEMPLATE/DISK/SIZE')) / 1024.0, '.3f')
@override
def _vm_get_instance_type(self, vm_instance):
return vm_instance.findtext('USER_TEMPLATE/INSTANCE_TYPE')
| apache-2.0 | -4,327,889,888,004,436,000 | 39.623188 | 120 | 0.567249 | false | 3.804731 | true | false | false |
underscorephil/softlayer-python | SoftLayer/CLI/core.py | 2 | 6133 | """
SoftLayer.CLI.core
~~~~~~~~~~~~~~~~~~
Core for the SoftLayer CLI
:license: MIT, see LICENSE for more details.
"""
from __future__ import print_function
import logging
import os
import sys
import time
import types
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer import consts
# pylint: disable=too-many-public-methods, broad-except, unused-argument
# pylint: disable=redefined-builtin, super-init-not-called
START_TIME = time.time()
DEBUG_LOGGING_MAP = {
0: logging.CRITICAL,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG
}
VALID_FORMATS = ['table', 'raw', 'json']
DEFAULT_FORMAT = 'raw'
if sys.stdout.isatty():
DEFAULT_FORMAT = 'table'
class CommandLoader(click.MultiCommand):
"""Loads module for click."""
def __init__(self, *path, **attrs):
click.MultiCommand.__init__(self, **attrs)
self.path = path
def list_commands(self, ctx):
"""List all sub-commands."""
env = ctx.ensure_object(environment.Environment)
env.load()
return sorted(env.list_commands(*self.path))
def get_command(self, ctx, name):
"""Get command for click."""
env = ctx.ensure_object(environment.Environment)
env.load()
# Do alias lookup (only available for root commands)
if len(self.path) == 0:
name = env.resolve_alias(name)
new_path = list(self.path)
new_path.append(name)
module = env.get_command(*new_path)
if isinstance(module, types.ModuleType):
return CommandLoader(*new_path, help=module.__doc__ or '')
else:
return module
@click.group(help="SoftLayer Command-line Client",
epilog="""To use most commands your SoftLayer
username and api_key need to be configured. The easiest way to do that is to
use: 'slcli setup'""",
cls=CommandLoader,
context_settings={'help_option_names': ['-h', '--help'],
'auto_envvar_prefix': 'SLCLI'})
@click.option('--format',
default=DEFAULT_FORMAT,
show_default=True,
help="Output format",
type=click.Choice(VALID_FORMATS))
@click.option('--config', '-C',
required=False,
default=click.get_app_dir('softlayer', force_posix=True),
show_default=True,
help="Config file location",
type=click.Path(resolve_path=True))
@click.option('--verbose', '-v',
help="Sets the debug noise level, specify multiple times "
"for more verbosity.",
type=click.IntRange(0, 3, clamp=True),
count=True)
@click.option('--proxy',
required=False,
help="HTTP[S] proxy to be use to make API calls")
@click.option('--really / --not-really', '-y',
is_flag=True,
required=False,
help="Confirm all prompt actions")
@click.option('--demo / --no-demo',
is_flag=True,
required=False,
help="Use demo data instead of actually making API calls")
@click.version_option(prog_name="slcli (SoftLayer Command-line)")
@environment.pass_env
def cli(env,
format='table',
config=None,
verbose=0,
proxy=None,
really=False,
demo=False,
**kwargs):
"""Main click CLI entry-point."""
if verbose > 0:
logger = logging.getLogger()
logger.addHandler(logging.StreamHandler())
logger.setLevel(DEBUG_LOGGING_MAP.get(verbose, logging.DEBUG))
    # Populate environment with client and set it as the context object
env.skip_confirmations = really
env.config_file = config
env.format = format
env.ensure_client(config_file=config, is_demo=demo, proxy=proxy)
env.vars['_start'] = time.time()
env.vars['_timings'] = SoftLayer.TimingTransport(env.client.transport)
env.client.transport = env.vars['_timings']
@cli.resultcallback()
@environment.pass_env
def output_diagnostics(env, verbose=0, **kwargs):
"""Output diagnostic information."""
if verbose > 0:
diagnostic_table = formatting.Table(['name', 'value'])
diagnostic_table.add_row(['execution_time',
'%fs' % (time.time() - START_TIME)])
api_call_value = []
for call, _, duration in env.vars['_timings'].get_last_calls():
api_call_value.append(
"%s::%s (%fs)" % (call.service, call.method, duration))
diagnostic_table.add_row(['api_calls', api_call_value])
diagnostic_table.add_row(['version', consts.USER_AGENT])
diagnostic_table.add_row(['python_version', sys.version])
diagnostic_table.add_row(['library_location',
os.path.dirname(SoftLayer.__file__)])
env.err(env.fmt(diagnostic_table))
def main(reraise_exceptions=False, **kwargs):
"""Main program. Catches several common errors and displays them nicely."""
exit_status = 0
try:
cli.main(**kwargs)
except SoftLayer.SoftLayerAPIError as ex:
if 'invalid api token' in ex.faultString.lower():
print("Authentication Failed: To update your credentials,"
" use 'slcli config setup'")
exit_status = 1
else:
print(str(ex))
exit_status = 1
except SoftLayer.SoftLayerError as ex:
print(str(ex))
exit_status = 1
except exceptions.CLIAbort as ex:
print(str(ex.message))
exit_status = ex.code
except Exception:
if reraise_exceptions:
raise
import traceback
print("An unexpected error has occured:")
print(str(traceback.format_exc()))
print("Feel free to report this error as it is likely a bug:")
print(" https://github.com/softlayer/softlayer-python/issues")
exit_status = 1
sys.exit(exit_status)
if __name__ == '__main__':
main()
| mit | -5,412,039,975,985,436,000 | 30.777202 | 79 | 0.598728 | false | 3.977302 | true | false | false |
linktlh/Toontown-journey | toontown/nametag/Nametag.py | 2 | 8810 | from direct.task.Task import Task
from pandac.PandaModules import TextNode, VBase4
from toontown.chat.ChatBalloon import ChatBalloon
from toontown.nametag import NametagGlobals
class Nametag:
TEXT_WORD_WRAP = 8
TEXT_Y_OFFSET = -0.05
CHAT_TEXT_WORD_WRAP = 12
PANEL_X_PADDING = 0.2
PANEL_Z_PADDING = 0.2
CHAT_BALLOON_ALPHA = 1
def __init__(self):
self.avatar = None
self.panel = None
self.icon = None
self.chatBalloon = None
self.chatButton = NametagGlobals.noButton
self.chatReversed = False
self.font = None
self.chatFont = None
self.chatType = NametagGlobals.CHAT
self.chatBalloonType = NametagGlobals.CHAT_BALLOON
self.nametagColor = NametagGlobals.NametagColors[NametagGlobals.CCNormal]
self.chatColor = NametagGlobals.ChatColors[NametagGlobals.CCNormal]
self.speedChatColor = self.chatColor[0][1]
self.nametagHidden = False
self.chatHidden = False
self.thoughtHidden = False
# Create our TextNodes:
self.textNode = TextNode('text')
self.textNode.setWordwrap(self.TEXT_WORD_WRAP)
self.textNode.setAlign(TextNode.ACenter)
self.chatTextNode = TextNode('chatText')
self.chatTextNode.setWordwrap(self.CHAT_TEXT_WORD_WRAP)
self.chatTextNode.setGlyphScale(ChatBalloon.TEXT_GLYPH_SCALE)
self.chatTextNode.setGlyphShift(ChatBalloon.TEXT_GLYPH_SHIFT)
# Add the tick task:
self.tickTaskName = self.getUniqueName() + '-tick'
self.tickTask = taskMgr.add(self.tick, self.tickTaskName, sort=45)
def destroy(self):
if self.tickTask is not None:
taskMgr.remove(self.tickTask)
self.tickTask = None
self.chatTextNode = None
self.textNode = None
self.chatFont = None
self.font = None
self.chatButton = NametagGlobals.noButton
if self.chatBalloon is not None:
self.chatBalloon.removeNode()
self.chatBalloon = None
if self.icon is not None:
self.icon.removeAllChildren()
self.icon = None
if self.panel is not None:
self.panel.removeNode()
self.panel = None
self.avatar = None
def getUniqueName(self):
return 'Nametag-' + str(id(self))
def getChatBalloonModel(self):
pass # Inheritors should override this method.
def getChatBalloonWidth(self):
pass # Inheritors should override this method.
def getChatBalloonHeight(self):
pass # Inheritors should override this method.
def tick(self, task):
return Task.done # Inheritors should override this method.
def updateClickRegion(self):
pass # Inheritors should override this method.
def drawChatBalloon(self, model, modelWidth, modelHeight):
pass # Inheritors should override this method.
def drawNametag(self):
pass # Inheritors should override this method.
def setAvatar(self, avatar):
self.avatar = avatar
def getAvatar(self):
return self.avatar
def setIcon(self, icon):
self.icon = icon
def getIcon(self):
return self.icon
def setChatButton(self, chatButton):
self.chatButton = chatButton
def getChatButton(self):
return self.chatButton
def hasChatButton(self):
if (self.chatBalloonType == NametagGlobals.CHAT_BALLOON) and self.chatHidden:
return False
if (self.chatBalloonType == NametagGlobals.THOUGHT_BALLOON) and self.thoughtHidden:
return False
return self.chatButton != NametagGlobals.noButton
def setChatReversed(self, chatReversed):
self.chatReversed = chatReversed
def getChatReversed(self):
return self.chatReversed
def setFont(self, font):
self.font = font
if self.font is not None:
self.textNode.setFont(self.font)
self.update()
def getFont(self):
return self.font
def setChatFont(self, chatFont):
self.chatFont = chatFont
if self.chatFont is not None:
self.chatTextNode.setFont(self.chatFont)
self.update()
def getChatFont(self):
return self.chatFont
def setChatType(self, chatType):
self.chatType = chatType
def getChatType(self):
return self.chatType
def setChatBalloonType(self, chatBalloonType):
self.chatBalloonType = chatBalloonType
def getChatBalloonType(self):
return self.chatBalloonType
def setNametagColor(self, nametagColor):
self.nametagColor = nametagColor
def getNametagColor(self):
return self.nametagColor
def setChatColor(self, chatColor):
self.chatColor = chatColor
def getChatColor(self):
return self.chatColor
def setSpeedChatColor(self, speedChatColor):
self.speedChatColor = speedChatColor
def getSpeedChatColor(self):
return self.speedChatColor
def hideNametag(self):
self.nametagHidden = True
def showNametag(self):
self.nametagHidden = False
def hideChat(self):
self.chatHidden = True
def showChat(self):
self.chatHidden = False
def hideThought(self):
self.thoughtHidden = True
def showThought(self):
self.thoughtHidden = False
def applyClickState(self, clickState):
if self.chatBalloon is not None:
foreground, background = self.chatColor[clickState]
if self.chatType == NametagGlobals.SPEEDCHAT:
background = self.speedChatColor
if background[3] > self.CHAT_BALLOON_ALPHA:
background = VBase4(
background[0], background[1], background[2],
self.CHAT_BALLOON_ALPHA)
self.chatBalloon.setForeground(foreground)
self.chatBalloon.setBackground(background)
self.chatBalloon.setButton(self.chatButton[clickState])
elif self.panel is not None:
foreground, background = self.nametagColor[clickState]
self.setForeground(foreground)
self.setBackground(background)
def setText(self, text):
self.textNode.setText(text)
def getText(self):
return self.textNode.getText()
def setChatText(self, chatText):
self.chatTextNode.setText(chatText)
def getChatText(self):
return self.chatTextNode.getText()
def setWordWrap(self, wordWrap):
if wordWrap is None:
wordWrap = self.TEXT_WORD_WRAP
self.textNode.setWordwrap(wordWrap)
self.update()
def getWordWrap(self):
return self.textNode.getWordwrap()
def setChatWordWrap(self, chatWordWrap):
if (chatWordWrap is None) or (chatWordWrap > self.CHAT_TEXT_WORD_WRAP):
chatWordWrap = self.CHAT_TEXT_WORD_WRAP
self.chatTextNode.setWordwrap(chatWordWrap)
self.update()
def getChatWordWrap(self):
return self.chatTextNode.getWordwrap()
def setForeground(self, foreground):
self.textNode.setTextColor(foreground)
def setBackground(self, background):
if self.panel is not None:
self.panel.setColor(background)
def setShadow(self, shadow):
self.textNode.setShadow(shadow)
def getShadow(self):
return self.textNode.getShadow()
def clearShadow(self):
self.textNode.clearShadow()
def update(self):
if self.chatBalloon is not None:
self.chatBalloon.removeNode()
self.chatBalloon = None
if self.panel is not None:
self.panel.removeNode()
self.panel = None
if self.getChatText():
if self.chatBalloonType == NametagGlobals.CHAT_BALLOON:
if not self.chatHidden:
model = self.getChatBalloonModel()
modelWidth = self.getChatBalloonWidth()
modelHeight = self.getChatBalloonHeight()
self.drawChatBalloon(model, modelWidth, modelHeight)
return
elif self.chatBalloonType == NametagGlobals.THOUGHT_BALLOON:
if not self.thoughtHidden:
model = NametagGlobals.thoughtBalloonModel
modelWidth = NametagGlobals.thoughtBalloonWidth
modelHeight = NametagGlobals.thoughtBalloonHeight
self.drawChatBalloon(model, modelWidth, modelHeight)
return
if hasattr(self.avatar, 'ghostMode'):
if self.avatar.ghostMode == 2:
return
if self.getText() and (not self.nametagHidden):
self.drawNametag()
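# Subclassing sketch (illustrative only; the concrete attribute names are
# assumptions, not taken from this file). Concrete nametags override the
# balloon hooks declared above, e.g.:
#
#   class Nametag2d(Nametag):
#       def getChatBalloonModel(self):
#           return NametagGlobals.chatBalloon2dModel
#       def getChatBalloonWidth(self):
#           return NametagGlobals.chatBalloon2dWidth
#       def getChatBalloonHeight(self):
#           return NametagGlobals.chatBalloon2dHeight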
| apache-2.0 | -2,065,280,034,159,601,700 | 28.563758 | 91 | 0.636209 | false | 3.973839 | false | false | false |
dhowland/EasyAVR | keymapper/setup.py | 1 | 2356 | #!/usr/bin/env python3
#
# Easy AVR USB Keyboard Firmware
# Copyright (C) 2013-2020 David Howland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
from easykeymap import __version__
setup(
name = 'easykeymap',
version = __version__,
author = 'David Howland',
author_email = 'dhowland@gmail.com',
description = 'Easy AVR USB Keyboard Firmware Keymapper',
long_description = 'Easy to use keymapping GUI for keyboards based on USB AVRs.',
license = "GPLv2",
keywords = "Easy AVR Keymap keyboard firmware",
url = 'https://github.com/dhowland/EasyAVR',
platforms = 'any',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: MacOS X',
'Environment :: Win32 (MS Windows)',
'Environment :: X11 Applications :: GTK',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: C',
'Topic :: Utilities',
],
install_requires = ['wxPython >= 4.1.0'],
packages = find_packages(),
package_data = {
'easykeymap': ['builds/*.hex', 'configs/*.cfg', 'res/*.*']
},
entry_points = {
'gui_scripts': [
'easykeymap = easykeymap.__main__:main',
]
}
)
| gpl-2.0 | 8,700,526,508,964,115,000 | 37 | 85 | 0.637946 | false | 4.076125 | false | false | false |
abalakh/robottelo | tests/foreman/ui/test_discoveredhosts.py | 1 | 13088 | # -*- encoding: utf-8 -*-
"""Test class for Foreman Discovery"""
from fauxfactory import gen_string, gen_mac
from nailgun import entities
from robottelo.config import conf
from robottelo.decorators import stubbed
from robottelo import ssh
from robottelo.test import UITestCase
from robottelo.ui.session import Session
from time import sleep
class Discovery(UITestCase):
"""Implements Foreman discovery tests in UI."""
name = gen_string("alpha")
image_path = '/var/lib/libvirt/images/{0}.img'.format(name)
def _pxe_boot_host(self, mac):
"""PXE boot a unknown host"""
libvirt_server = 'qemu+tcp://{0}:16509/system'.format(
conf.properties['main.server.hostname'])
ssh.command('virt-install --hvm --network=bridge:virbr1, --mac={0} '
'--pxe --name {1} --ram=1024 --vcpus=1 --os-type=linux '
'--os-variant=rhel7 --disk path={2},size=10 --connect {3} '
'--noautoconsole'
.format(mac, self.name, self.image_path, libvirt_server))
sleep(30)
@classmethod
def setUpClass(cls):
"""Steps to Configure foreman discovery
1. Build PXE default template
2. Create Organization/Location
3. Update Global parameters to set default org and location for
discovered hosts.
4. Enable auto_provision flag to perform discovery via discovery rules.
"""
# Build PXE default template to get default PXE file
entities.ConfigTemplate().build_pxe_default()
# Create Org and location
cls.org = entities.Organization(name=gen_string("alpha")).create()
cls.org_name = cls.org.name
cls.loc = entities.Location(
name=gen_string('alpha'),
organization=[cls.org],
).create()
# Update default org and location params to place discovered host
cls.discovery_loc = entities.Setting().search(
query={'search': 'name="discovery_location"'})[0]
cls.discovery_loc.value = cls.loc.name
cls.discovery_loc.update({'value'})
cls.discovery_org = entities.Setting().search(
query={'search': 'name="discovery_organization"'})[0]
cls.discovery_org.value = cls.org.name
cls.discovery_org.update({'value'})
# Enable flag to auto provision discovered hosts via discovery rules
cls.discovery_auto = entities.Setting().search(
query={'search': 'name="discovery_auto"'})[0]
cls.default_discovery_auto = str(cls.discovery_auto.value)
cls.discovery_auto.value = 'True'
cls.discovery_auto.update({'value'})
super(Discovery, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Restore default 'discovery_auto' global setting's value"""
cls.discovery_auto.value = cls.default_discovery_auto
cls.discovery_auto.update({'value'})
super(Discovery, cls).tearDownClass()
def tearDown(self):
"""Delete the pxe host to free the resources"""
ssh.command('virsh destroy {0}'.format(self.name))
ssh.command('virsh undefine {0}'.format(self.name))
ssh.command('virsh vol-delete --pool default {0}'
.format(self.image_path))
super(Discovery, self).tearDown()
def test_host_discovery(self):
"""@Test: Discover a host via proxy by setting "proxy.type=proxy" in
PXE default
@Feature: Foreman Discovery
@Setup: Provisioning should be configured
@Steps: PXE boot a host/VM
@Assert: Host should be successfully discovered
"""
mac = gen_mac(multicast=True, locally=True)
hostname = 'mac{0}'.format(mac.replace(':', ""))
self._pxe_boot_host(mac)
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
@stubbed()
def test_host_discovery_facts(self):
"""@Test: Check all facts of discovered hosts are correctly displayed
@Feature: Foreman Discovery
@Setup: Provisioning should be configured
@Steps: Validate IP, memory, mac etc of discovered host
@Assert: All facts should be displayed correctly
@Status: Manual
"""
@stubbed()
def test_provision_discovered_host_1(self):
"""@Test: Provision the selected discovered host by selecting
'provision' button
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Host should be provisioned successfully and entry from
discovered host should be auto removed
@Status: Manual
"""
@stubbed()
def test_provision_discovered_host_2(self):
"""@Test: Provision the selected discovered host from facts page by
clicking 'provision'
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Host should be provisioned successfully and entry from
discovered host should be auto removed
@Status: Manual
"""
def test_delete_discovered_host_1(self):
"""@Test: Delete the selected discovered host
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
"""
mac = gen_mac(multicast=True, locally=True)
hostname = 'mac{0}'.format(mac.replace(':', ""))
self._pxe_boot_host(mac)
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
self.discoveredhosts.delete(hostname)
@stubbed()
def test_delete_discovered_host_2(self):
"""@Test: Delete the selected discovered host from facts page
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
@Status: Manual
"""
@stubbed()
def test_delete_multiple_discovered_hosts(self):
"""@Test: Delete multiple discovered hosts from 'Select Action'
drop down
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
@Status: Manual
"""
@stubbed()
def test_refresh_discovered_host_facts(self):
"""@Test: Refresh the facts of discovered hosts
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Facts should be refreshed successfully
        ToDo: Need to check what changes on the host to confirm it is
        updated when facts are refreshed
@Status: Manual
"""
@stubbed()
def test_change_default_org(self):
"""@Test: Change the default org of more than one discovered hosts
from 'Select Action' drop down
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Default org should be successfully changed for multiple hosts
@Status: Manual
"""
@stubbed()
def test_change_default_location(self):
"""@Test: Change the default location of more than one discovered hosts
from 'Select Action' drop down
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Default Location should be successfully changed for multiple
hosts
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_1(self):
"""@Test: Create a new discovery rule
Set query as (e.g IP=IP_of_discovered_host)
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Host should reboot and provision
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_2(self):
"""@Test: Create a new discovery rule with (host_limit = 0)
that applies to multi hosts.
Set query as cpu_count = 1 OR mem > 500
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: All Hosts of same subnet should reboot and provision
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_3(self):
"""@Test: Create multiple discovery rules with different priority
@Feature: Foreman Discovery
@Setup: Multiple hosts should already be discovered
@Assert: Host with lower count have higher priority
and that rule should be executed first
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_4(self):
"""@Test: Create a discovery rule and execute it when
"auto_provisioning" flag set to 'false'
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Host should not be rebooted automatically
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_5(self):
"""@Test: Create a discovery rule with invalid query
e.g. BIOS = xyz
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Rule should automatically be skipped on clicking
'Auto provision'. UI Should raise 'No matching rule found'
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_6(self):
"""@Test: Create a discovery rule (CPU_COUNT = 2) with host limit 1 and
provision more than one host with same rule
@Feature: Foreman Discovery
@Setup: Host with two CPUs should already be discovered
        @Assert: Rule should only be applied to one discovered host; for the
        other host the rule should be skipped.
@Status: Manual
"""
@stubbed()
def test_update_discovery_rule_1(self):
"""@Test: Update an existing rule and execute it
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: User should be able to update the rule and it should be
executed on discovered host
@Status: Manual
"""
@stubbed()
def test_update_discovery_rule_2(self):
"""@Test: Update the discovered host name and provision it
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: The hostname should be updated and host should be provisioned
@Status: Manual
"""
@stubbed()
def test_update_discovery_prefix(self):
"""@Test: Update the discovery_prefix parameter other than mac
@Feature: Foreman Discovery
@Steps:
        1. Goto settings -> Discovered tab -> discovery_prefix
2. Edit discovery_prefix using any text that must start with a letter
@Setup: Host should already be discovered
@Assert: discovery_prefix is updated and provisioned host has same
prefix in its hostname
@Status: Manual
"""
@stubbed()
def test_auto_provision_all(self):
"""@Test: Discover a bunch of hosts and auto-provision all
@Feature: Foreman Discovery
@Assert: All host should be successfully rebooted and provisioned
@Status: Manual
"""
@stubbed()
def test_add_new_discovery_fact(self):
"""@Test: Add a new fact column to display on discovered host page
@Feature: Foreman Discovery
@Steps:
1. Goto settings -> Discovered tab -> discovery_fact_coloumn
2. Edit discovery_fact_coloumn
3. Add uuid or os
@Assert: The added fact should be displayed on 'discovered_host' page
after successful discovery
@Status: Manual
"""
@stubbed()
def test_add_invalid_discovery_fact(self):
"""@Test: Add a new fact column with invalid fact to display on
discovered host page
@Feature: Foreman Discovery
@Steps:
1. Goto settings -> Discovered tab -> discovery_fact_coloumn
2. Edit discovery_fact_coloumn
3. Add 'test'
@Assert: The added fact should be displayed on 'discovered_host' page
after successful discovery and shows 'N/A'
@Status: Manual
"""
@stubbed()
def test_discovery_manager_role(self):
"""@Test: Assign 'Discovery_Manager' role to a normal user
@Feature: Foreman Discovery
@Assert: User should be able to view, provision, edit and destroy one
or more discovered host as well view, create_new, edit, execute and
delete discovery rules.
@Status: Manual
"""
@stubbed()
def test_discovery_role(self):
"""@Test: Assign 'Discovery" role to a normal user
@Feature: Foreman Discovery
@Assert: User should be able to view, provision, edit and destroy one
or more discovered host
@Status: Manual
"""
| gpl-3.0 | -384,245,035,384,904,500 | 26.728814 | 79 | 0.624083 | false | 4.390473 | true | false | false |
Arzie/deluge | deluge/ui/console/commander.py | 3 | 4659 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2009 Ido Abramovich <ido.deluge@gmail.com>
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2011 Nick Lanham <nick@afternight.org>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
from __future__ import print_function
import logging
import sys
from twisted.internet import defer
import deluge.component as component
from deluge.error import DelugeError
from deluge.ui.client import client
from deluge.ui.console.colors import strip_colors
log = logging.getLogger(__name__)
class Commander:
def __init__(self, cmds, interactive=False):
self._commands = cmds
self.console = component.get("ConsoleUI")
self.interactive = interactive
def write(self, line):
print(strip_colors(line))
def do_command(self, cmd):
"""
Processes a command.
:param cmd: str, the command string
"""
if not cmd:
return
cmd, _, line = cmd.partition(" ")
try:
parser = self._commands[cmd].create_parser()
except KeyError:
self.write("{!error!}Unknown command: %s" % cmd)
return
args = self._commands[cmd].split(line)
# Do a little hack here to print 'command --help' properly
parser._print_help = parser.print_help
def print_help(f=None):
if self.interactive:
self.write(parser.format_help())
else:
parser._print_help(f)
parser.print_help = print_help
# Only these commands can be run when not connected to a daemon
not_connected_cmds = ["help", "connect", "quit"]
aliases = []
for c in not_connected_cmds:
aliases.extend(self._commands[c].aliases)
not_connected_cmds.extend(aliases)
if not client.connected() and cmd not in not_connected_cmds:
self.write("{!error!}Not connected to a daemon, please use the connect command first.")
return
try:
options, args = parser.parse_args(args)
except TypeError as ex:
self.write("{!error!}Error parsing options: %s" % ex)
return
if not getattr(options, "_exit", False):
try:
ret = self._commands[cmd].handle(*args, **options.__dict__)
except Exception as ex:
self.write("{!error!} %s" % ex)
log.exception(ex)
import traceback
self.write("%s" % traceback.format_exc())
return defer.succeed(True)
else:
return ret
def exec_args(self, args, host, port, username, password):
commands = []
if args:
# Multiple commands split by ";"
commands = [arg.strip() for arg in args.split(";")]
def on_connect(result):
def on_started(result):
def on_started(result):
def do_command(result, cmd):
return self.do_command(cmd)
d = defer.succeed(None)
for command in commands:
if command in ("quit", "exit"):
break
d.addCallback(do_command, command)
d.addCallback(do_command, "quit")
# We need to wait for the rpcs in start() to finish before processing
# any of the commands.
self.console.started_deferred.addCallback(on_started)
component.start().addCallback(on_started)
def on_connect_fail(reason):
if reason.check(DelugeError):
rm = reason.value.message
else:
rm = reason.getErrorMessage()
if host:
print("Could not connect to daemon: %s:%s\n %s" % (host, port, rm))
else:
print("Could not connect to localhost daemon\n %s" % rm)
self.do_command("quit")
if host:
d = client.connect(host, port, username, password)
else:
d = client.connect()
if not self.interactive:
if commands[0].startswith("connect"):
d = self.do_command(commands.pop(0))
elif "help" in commands:
self.do_command("help")
sys.exit(0)
d.addCallback(on_connect)
d.addErrback(on_connect_fail)
| gpl-3.0 | 1,722,984,902,554,104,600 | 32.76087 | 99 | 0.555055 | false | 4.274312 | false | false | false |
eri-trabiccolo/exaile | xl/event.py | 1 | 12090 | # Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
"""
Provides a signals-like system for sending and listening for 'events'
Events are kind of like signals, except they may be listened for on a
global scale, rather than connected on a per-object basis like signals
are. This means that ANY object can emit ANY event, and these events may
be listened for by ANY object.
Events should be emitted AFTER the given event has taken place. Often the
most appropriate spot is immediately before a return statement.
"""
from __future__ import with_statement
from inspect import ismethod
import logging
from new import instancemethod
import re
import threading
import time
import traceback
import weakref
import glib
from xl import common
from xl.nls import gettext as _
# define this here so the interpreter doesn't complain
EVENT_MANAGER = None
logger = logging.getLogger(__name__)
class Nothing(object):
pass
_NONE = Nothing() # used by event for a safe None replacement
def log_event(type, obj, data):
"""
Sends an event.
:param type: the *type* or *name* of the event.
:type type: string
:param obj: the object sending the event.
:type obj: object
:param data: some data about the event, None if not required
:type data: object
"""
global EVENT_MANAGER
e = Event(type, obj, data, time.time())
EVENT_MANAGER.emit(e)
def add_callback(function, type=None, obj=None, *args, **kwargs):
"""
Adds a callback to an event
You should ALWAYS specify one of the two options on what to listen
for. While not forbidden to listen to all events, doing so will
cause your callback to be called very frequently and may cause
slowness within the player itself.
:param function: the function to call when the event happens
:type function: callable
:param type: the *type* or *name* of the event to listen for, e.g.
`tracks_added`, `cover_changed`. Defaults to any event if
not specified.
:type type: string
:param obj: the object to listen to events from, e.g. `exaile.collection`
or `xl.covers.MANAGER`. Defaults to any object if not
specified.
:type obj: object
Any additional parameters will be passed to the callback.
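Example (illustrative, with `my_func` being any callable you define):
calling `add_callback(my_func, 'tracks_added')` registers `my_func` so
that it is invoked as `my_func(event_type, obj, data)` whenever a
`tracks_added` event is emitted.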
:returns: a convenience function that you can call to remove the callback.
"""
global EVENT_MANAGER
return EVENT_MANAGER.add_callback(function, type, obj, args, kwargs)
def remove_callback(function, type=None, obj=None):
"""
Removes a callback
The parameters passed should match those that were passed when adding
the callback
"""
global EVENT_MANAGER
EVENT_MANAGER.remove_callback(function, type, obj)
class Event(object):
"""
Represents an Event
"""
def __init__(self, type, obj, data, time):
"""
type: the 'type' or 'name' for this Event [string]
obj: the object emitting the Event [object]
data: some piece of data relevant to the Event [object]
"""
self.type = type
self.object = obj
self.data = data
self.time = time
class Callback(object):
"""
Represents a callback
"""
def __init__(self, function, time, args, kwargs):
"""
@param function: the function to call
@param time: the time this callback was added
"""
self.valid = True
self.wfunction = _getWeakRef(function, self.vanished)
self.time = time
self.args = args
self.kwargs = kwargs
def vanished(self, ref):
self.valid = False
class _WeakMethod:
"""Represent a weak bound method, i.e. a method doesn't keep alive the
object that it is bound to. It uses WeakRef which, used on its own,
produces weak methods that are dead on creation, not very useful.
Typically, you will use the getRef() function instead of using
this class directly. """
def __init__(self, method, notifyDead = None):
"""
The method must be bound. notifyDead will be called when
object that method is bound to dies.
"""
assert ismethod(method)
if method.im_self is None:
raise ValueError, "We need a bound method!"
if notifyDead is None:
self.objRef = weakref.ref(method.im_self)
else:
self.objRef = weakref.ref(method.im_self, notifyDead)
self.fun = method.im_func
self.cls = method.im_class
def __call__(self):
if self.objRef() is None:
return None
else:
return instancemethod(self.fun, self.objRef(), self.cls)
def __eq__(self, method2):
if not isinstance(method2, _WeakMethod):
return False
return self.fun is method2.fun \
and self.objRef() is method2.objRef() \
and self.objRef() is not None
def __hash__(self):
return hash(self.fun)
def __repr__(self):
dead = ''
if self.objRef() is None:
dead = '; DEAD'
obj = '<%s at %s%s>' % (self.__class__, id(self), dead)
return obj
def refs(self, weakRef):
"""Return true if we are storing same object referred to by weakRef."""
return self.objRef == weakRef
def _getWeakRef(obj, notifyDead=None):
"""
Get a weak reference to obj. If obj is a bound method, a _WeakMethod
object, that behaves like a WeakRef, is returned, if it is
anything else a WeakRef is returned. If obj is an unbound method,
a ValueError will be raised.
"""
if ismethod(obj):
createRef = _WeakMethod
else:
createRef = weakref.ref
if notifyDead is None:
return createRef(obj)
else:
return createRef(obj, notifyDead)
class EventManager(object):
"""
Manages all Events
"""
def __init__(self, use_logger=False, logger_filter=None):
self.callbacks = {}
self.use_logger = use_logger
self.logger_filter = logger_filter
# RLock is needed so that event callbacks can themselves send
# synchronous events and add or remove callbacks
self.lock = threading.RLock()
def emit(self, event):
"""
Emits an Event, calling any registered callbacks.
event: the Event to emit [Event]
"""
emit_logmsg = self.use_logger and (not self.logger_filter or \
re.search(self.logger_filter, event.type))
with self.lock:
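# Gather callbacks registered for this exact event type/object as
# well as catch-all registrations keyed on _NONE (any type or object).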
callbacks = set()
for tcall in [_NONE, event.type]:
for ocall in [_NONE, event.object]:
try:
callbacks.update(self.callbacks[tcall][ocall])
except KeyError:
pass
# now call them
for cb in callbacks:
try:
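# cb.valid turns False (via Callback.vanished) once the weakly
# referenced callback target has been garbage collected; such
# stale entries are pruned here.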
if not cb.valid:
try:
self.callbacks[event.type][event.object].remove(cb)
except (KeyError, ValueError):
pass
elif event.time >= cb.time:
if emit_logmsg:
logger.debug("Attempting to call "
"%(function)s in response "
"to %(event)s." % {
'function': cb.wfunction(),
'event': event.type})
cb.wfunction().__call__(event.type, event.object,
event.data, *cb.args, **cb.kwargs)
except Exception:
# something went wrong inside the function we're calling
common.log_exception(logger,
message="Event callback exception caught!")
if emit_logmsg:
logger.debug("Sent '%(type)s' event from "
"'%(object)s' with data '%(data)s'." %
{'type' : event.type, 'object' : repr(event.object),
'data' : repr(event.data)})
def emit_async(self, event):
"""
Same as emit(), but does not block.
"""
glib.idle_add(self.emit, event)
def add_callback(self, function, type, obj, args, kwargs):
"""
Registers a callback.
You should always specify at least one of type or object.
@param function: The function to call [function]
@param type: The 'type' or 'name' of event to listen for. Defaults
to any. [string]
@param obj: The object to listen to events from. Defaults
to any. [string]
Returns a convenience function that you can call to
remove the callback.
"""
with self.lock:
# add the specified categories if needed.
if not self.callbacks.has_key(type):
self.callbacks[type] = weakref.WeakKeyDictionary()
if obj is None:
obj = _NONE
try:
callbacks = self.callbacks[type][obj]
except KeyError:
callbacks = self.callbacks[type][obj] = []
# add the actual callback
callbacks.append(Callback(function, time.time(), args, kwargs))
if self.use_logger:
if not self.logger_filter or re.search(self.logger_filter, type):
logger.debug("Added callback %s for [%s, %s]" %
(function, type, obj))
return lambda: self.remove_callback(function, type, obj)
def remove_callback(self, function, type=None, obj=None):
"""
Unsets a callback
The parameters must match those given when the callback was
registered. (minus any additional args)
"""
if obj is None:
obj = _NONE
remove = []
with self.lock:
try:
callbacks = self.callbacks[type][obj]
for cb in callbacks:
if cb.wfunction() == function:
remove.append(cb)
except KeyError:
return
except TypeError:
return
for cb in remove:
callbacks.remove(cb)
if self.use_logger:
if not self.logger_filter or re.search(self.logger_filter, type):
logger.debug("Removed callback %s for [%s, %s]" %
(function, type, obj))
EVENT_MANAGER = EventManager()
# vim: et sts=4 sw=4
| gpl-2.0 | -9,100,791,220,514,810,000 | 33.056338 | 82 | 0.579487 | false | 4.456321 | false | false | false |
sndrtj/varda | varda/models.py | 1 | 29737 | # -*- coding: utf-8 -*-
"""
Models backed by SQL using SQLAlchemy.
.. note:: All genomic positions in this module are one-based and inclusive.
.. moduleauthor:: Martijn Vermaat <martijn@vermaat.name>
.. Licensed under the MIT license, see the LICENSE file.
"""
from datetime import datetime
from functools import wraps
import gzip
from hashlib import sha1
import hmac
import os
import sqlite3
import uuid
import bcrypt
from flask import current_app
from sqlalchemy import event, Index
from sqlalchemy.engine import Engine
from sqlalchemy.orm.exc import DetachedInstanceError
import werkzeug
from . import db
from .region_binning import assign_bin
# Todo: Use the types for which we have validators.
DATA_SOURCE_FILETYPES = ('bed', 'vcf', 'csv')
OBSERVATION_ZYGOSITIES = ('heterozygous', 'homozygous')
# Note: Add new roles at the end.
USER_ROLES = (
'admin', # Can do anything.
'importer', # Can import samples.
'annotator', # Can annotate samples.
'trader' # Can annotate samples if they are also imported.
)
@event.listens_for(Engine, 'connect')
def set_sqlite_pragma(dbapi_connection, connection_record):
"""
We use foreign keys (and ``ON DELETE CASCADE`` on some of these), but in
SQLite these are only enforced if ``PRAGMA foreign_keys=ON`` is executed
on all connections before use.
[1] http://docs.sqlalchemy.org/en/latest/dialects/sqlite.html#foreign-key-support
"""
if isinstance(dbapi_connection, sqlite3.Connection):
cursor = dbapi_connection.cursor()
cursor.execute('PRAGMA foreign_keys=ON')
cursor.close()
def detached_session_fix(method):
"""
Decorator providing a workaround for a possible bug in Celery.
If `CELERY_ALWAYS_EAGER=True`, the worker can end up with a detached
session when printing its log after an error. This causes an exception,
but with this decorator it is ignored and the method returns `None`.
We use this on the `__repr__` methods of the SQLAlchemy models since they
tend to be called when the log is printed, making debugging a pain.
This is a hacky workaround and I think it's something that could be fixed
in Celery itself.
"""
@wraps(method)
def fixed_method(*args, **kwargs):
try:
return method(*args, **kwargs)
except DetachedInstanceError:
return None
return fixed_method
class InvalidDataSource(Exception):
"""
Exception thrown if data source validation failed.
"""
def __init__(self, code, message):
self.code = code
self.message = message
super(InvalidDataSource, self).__init__(code, message)
class DataUnavailable(Exception):
"""
Exception thrown when reading from a data source whose data is no longer
cached (in the case of local storage) or no longer exists (in the case of
a URL resource).
"""
def __init__(self, code, message):
self.code = code
self.message = message
super(DataUnavailable, self).__init__(code, message)
class User(db.Model):
"""
User in the system.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
#: User name.
name = db.Column(db.String(200))
#: Unique string used to identify the user.
login = db.Column(db.String(40), index=True, unique=True)
#: Hashed password.
password_hash = db.Column(db.String(100))
#: User email address.
email = db.Column(db.String(200))
#: Bitstring where the leftmost role in the :data:`USER_ROLES` tuple is
#: represented by the least-significant bit. Essentially, this creates a set
#: of roles.
#:
#: You should probably use the :attr:`roles` property instead of accessing
#: this field directly.
roles_bitstring = db.Column(db.Integer)
#: Date and time of creation.
added = db.Column(db.DateTime)
def __init__(self, name, login, password='', password_hash=None,
email=None, roles=None):
"""
If `password_hash` is specified, it is used directly as a bcrypt hash.
Otherwise, the bcrypt hash of `password` is computed.
A bcrypt hash for a password can be computed as follows:
>>> from varda.models import User
>>> User.hash_password('my plaintext password')
'$2a$12$pGK5H8c74SR0Zx0nqHQEU.6qTICkj1WUn1RMzN9NRBFmZFOGE1HF6'
"""
roles = roles or []
self.name = name
self.login = login
self.email = email
self.added = datetime.now()
self.password_hash = password_hash or self.hash_password(password)
self.roles_bitstring = self._encode_roles(roles)
@detached_session_fix
def __repr__(self):
return '<User %r>' % self.login
@staticmethod
def hash_password(password):
return bcrypt.hashpw(password, bcrypt.gensalt())
@staticmethod
def _encode_roles(roles):
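# Encode the roles as a bitstring: bit i is set iff USER_ROLES[i] is
# included. For example (illustrative), {'admin', 'annotator'} encodes
# to 0b101 == 5.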
return sum(pow(2, i) for i, role
in enumerate(USER_ROLES) if role in roles)
@property
def password(self):
"""
Since we only store the hashed password (in :attr:`password_hash`) and
not the password itself, this is always `None`.
"""
return None
@password.setter
def password(self, password):
"""
Change the password for the user.
"""
self.password_hash = self.hash_password(password)
@property
def roles(self):
"""
A subset of the roles defined in :data:`USER_ROLES`.
"""
return {role for i, role in enumerate(USER_ROLES)
if self.roles_bitstring & pow(2, i)}
@roles.setter
def roles(self, roles):
"""
Change the roles for the user.
:arg roles: Subset of the roles defined in :data:`USER_ROLES`.
:type roles: sequence
"""
self.roles_bitstring = self._encode_roles(roles)
def check_password(self, password):
"""
Return `True` iff `password` matches the user password.
"""
return (bcrypt.hashpw(password, self.password_hash) ==
self.password_hash)
class Token(db.Model):
"""
User token for authentication.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer,
db.ForeignKey('user.id', ondelete='CASCADE'),
nullable=False)
#: Human-readable name.
name = db.Column(db.String(200))
#: The actual token string.
key = db.Column(db.String(40), index=True, unique=True)
#: Date and time of creation.
added = db.Column(db.DateTime)
#: The :class:`User` owning this token.
user = db.relationship(User,
backref=db.backref('tokens', lazy='dynamic',
cascade='all, delete-orphan',
passive_deletes=True))
def __init__(self, user, name):
self.user = user
self.name = name
self.added = datetime.now()
# Method to generate key taken from Django REST framework.
self.key = hmac.new(uuid.uuid4().bytes, digestmod=sha1).hexdigest()
@detached_session_fix
def __repr__(self):
return '<Token %r>' % self.name
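# Association table for the many-to-many relationship between samples
# and groups.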
group_membership = db.Table(
'group_membership', db.Model.metadata,
db.Column('sample_id', db.Integer,
db.ForeignKey('sample.id', ondelete='CASCADE'),
nullable=False),
db.Column('group_id', db.Integer,
db.ForeignKey('group.id', ondelete='CASCADE'),
nullable=False))
class Group(db.Model):
"""
Group (e.g. disease type)
"""
__table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8"}
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
#: Human readable name
name = db.Column(db.String(200), unique=True)
#: date and time of creation
added = db.Column(db.DateTime)
#: the :class:`User` who created this sample
user = db.relationship(User,
backref=db.backref('groups', lazy='dynamic'))
def __init__(self, user, name):
self.user = user
self.name = name
self.added = datetime.now()
@detached_session_fix
def __repr__(self):
return '<Group %r>' % (self.name)
class Sample(db.Model):
"""
Sample (of one or more individuals).
"""
__tablename__ = 'sample'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
group_id = db.Column(db.Integer, db.ForeignKey('group.id'),
nullable=True)
#: Human-readable name.
name = db.Column(db.String(200))
#: Number of individuals.
pool_size = db.Column(db.Integer)
#: Date and time of creation.
added = db.Column(db.DateTime)
#: Set to `True` iff the sample can be included in frequency calculations.
active = db.Column(db.Boolean, default=False)
#: Set to `True` iff the sample has coverage information (i.e., it has one
#: or more :class:`Coverage` entries). If `False`, the sample will not be
#: included in global observation frequencies (usually only the case for
#: population studies).
coverage_profile = db.Column(db.Boolean)
#: Set to `True` iff the sample can be directly queried for observation
#: frequencies by anyone.
public = db.Column(db.Boolean)
#: Textual notes.
#:
#: .. hint:: If you use `Markdown <http://daringfireball.net/projects/markdown/>`_
#: here, the `Aulë <https://github.com/varda/aule>`_ web interface
#: will render it as such.
notes = db.Column(db.Text)
#: The :class:`User` owning this sample.
user = db.relationship(User,
backref=db.backref('samples', lazy='dynamic'))
#: A :class:`Group` to which this sample belongs
group = db.relationship(Group, secondary=group_membership,
cascade='all', passive_deletes=True)
def __init__(self, user, name, pool_size=1, coverage_profile=True,
public=False, notes=None, group=None):
self.user = user
self.name = name
self.pool_size = pool_size
self.added = datetime.now()
self.coverage_profile = coverage_profile
self.public = public
self.notes = notes
self.group = group
@detached_session_fix
def __repr__(self):
return '<Sample %r, pool_size=%r, active=%r, public=%r, group=%r>' \
% (self.name, self.pool_size, self.active, self.public, self.group)
class DataSource(db.Model):
"""
Data source (probably uploaded as a file).
.. note:: Data source :attr:`checksum` values are not forced to be unique,
since several users might upload the same data source and do different
things with it.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
#: Human-readable name.
name = db.Column(db.String(200))
#: Name of the file (in the directory defined by the `DATA_DIR`
#: configuration setting) used to store the data.
filename = db.Column(db.String(50))
#: Filetype can be any of the values in :data:`DATA_SOURCE_FILETYPES`.
filetype = db.Column(db.Enum(*DATA_SOURCE_FILETYPES, name='filetype'))
#: Set to `True` iff the data is stored gzip-compressed.
gzipped = db.Column(db.Boolean)
#: Date and time of creation.
added = db.Column(db.DateTime)
#: Checksum of the (uncompressed) data. Can be `None` if it is not yet
#: calculated.
checksum = db.Column(db.String(40))
#: Number of records in the file. Can be `None` if it is not yet
#: calculated.
records = db.Column(db.Integer)
#: The :class:`User` owning this data source.
user = db.relationship(User,
backref=db.backref('data_sources', lazy='dynamic'))
def __init__(self, user, name, filetype, upload=None, local_file=None,
empty=False, gzipped=False):
"""
One of the following three keyword arguments must be specified:
* `upload`: Data is provided as an uploaded file. Specifically,
`upload` is expected to be a :class:`werkzeug.datastructures.FileStorage`
instance.
* `local_file`: Data is locally available in the file with this name
in the directory specified by the `SECONDARY_DATA_DIR` configuration
setting. If the `SECONDARY_DATA_BY_USER` configuration setting is
`True`, an additional subdirectory within `SECONDARY_DATA_DIR` is
used with name equal to `user.login`.
* `empty`: No data is provided for the data source at this point. Data
can be written to it later using the :meth:`data_writer` method.
"""
if not filetype in DATA_SOURCE_FILETYPES:
raise InvalidDataSource('unknown_filetype',
'Data source filetype "%s" is unknown'
% filetype)
self.user = user
self.name = name
self.filename = str(uuid.uuid4())
self.filetype = filetype
self.gzipped = gzipped
self.added = datetime.now()
path = os.path.join(current_app.config['DATA_DIR'],
self.filename)
if upload is not None:
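# Uploaded data is always stored gzip-compressed: saved as-is when it
# is already gzipped, otherwise compressed while writing.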
if gzipped:
upload.save(path)
else:
data = gzip.open(path, 'wb')
data.write(upload.read())
data.close()
self.gzipped = True
elif local_file is not None:
if not current_app.config['SECONDARY_DATA_DIR']:
raise InvalidDataSource(
'invalid_data', 'Referencing local data files is not '
'allowed by system configuration')
if current_app.config['SECONDARY_DATA_BY_USER']:
local_dir = os.path.join(current_app.config['SECONDARY_DATA_DIR'],
user.login)
else:
local_dir = current_app.config['SECONDARY_DATA_DIR']
local_path = os.path.join(local_dir,
werkzeug.secure_filename(local_file))
if not os.path.isfile(local_path):
raise InvalidDataSource(
'invalid_data', 'Local data file referenced does not exist')
os.symlink(local_path, path)
elif not empty:
raise InvalidDataSource('invalid_data', 'No data supplied')
@detached_session_fix
def __repr__(self):
return '<DataSource %r, filename=%r, filetype=%r, records=%r>' \
% (self.name, self.filename, self.filetype, self.records)
def data(self):
"""
Get open file-like handle to data contained in this data source for
reading.
.. note:: Be sure to close after calling this.
"""
filepath = os.path.join(current_app.config['DATA_DIR'],
self.filename)
try:
if self.gzipped:
return gzip.open(filepath)
else:
return open(filepath)
except EnvironmentError:
raise DataUnavailable('data_source_not_cached',
'Data source is not in the cache')
def data_writer(self):
"""
Get open file-like handle to data contained in this data source for
writing.
.. note:: Be sure to close after calling this.
"""
filepath = os.path.join(current_app.config['DATA_DIR'],
self.filename)
try:
if self.gzipped:
return gzip.open(filepath, 'wb')
else:
return open(filepath, 'wb')
except EnvironmentError:
raise DataUnavailable('data_source_not_cached',
'Data source is not in the cache')
def empty(self):
"""
Remove all data from this data source.
"""
with self.data_writer():
pass
def local_path(self):
"""
Get a local filepath for the data.
"""
return os.path.join(current_app.config['DATA_DIR'], self.filename)
class Variation(db.Model):
"""
Coupling between a :class:`Sample`, a :class:`DataSource`, and a set of
:class:`Observation`s.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
sample_id = db.Column(db.Integer,
db.ForeignKey('sample.id', ondelete='CASCADE'),
nullable=False)
data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'),
nullable=False)
task_done = db.Column(db.Boolean, default=False)
task_uuid = db.Column(db.String(36))
#: Set to `True` iff observations not passing the filter (i.e., having a
#: value other than ``PASS`` in the VCF file) are discarded.
skip_filtered = db.Column(db.Boolean)
#: Set to `True` iff genotype information (i.e., the ``GT`` value in the
#: VCF file) is used to deduce observation :attr:`Observation.support` and
#: :attr:`Observation.zygosity`. See also
#: :attr:`prefer_genotype_likelihoods`.
use_genotypes = db.Column(db.Boolean)
#: Set to `True` iff genotype likelihoods (i.e., the ``GL`` and ``PL``
#: values in the VCF file) are prefered over genotype information. Only
#: used if :attr:`use_genotypes` is `True`.
prefer_genotype_likelihoods = db.Column(db.Boolean)
#: The :class:`Sample` this set of :class:`Observation`s belong to.
sample = db.relationship(Sample,
backref=db.backref('variations', lazy='dynamic',
cascade='all, delete-orphan',
passive_deletes=True))
#: The :class:`DataSource` this set of :class:`Observation`s are imported
#: from.
data_source = db.relationship(DataSource,
backref=db.backref('variations',
lazy='dynamic'))
def __init__(self, sample, data_source, skip_filtered=True,
use_genotypes=True, prefer_genotype_likelihoods=False):
self.sample = sample
self.data_source = data_source
self.skip_filtered = skip_filtered
self.use_genotypes = use_genotypes
self.prefer_genotype_likelihoods = prefer_genotype_likelihoods
@detached_session_fix
def __repr__(self):
return '<Variation task_done=%r, task_uuid=%r>' % (self.task_done,
self.task_uuid)
class Coverage(db.Model):
"""
Coupling between a :class:`Sample`, a :class:`DataSource`, and a set of
:class:`Region`s.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
sample_id = db.Column(db.Integer,
db.ForeignKey('sample.id', ondelete='CASCADE'),
nullable=False)
data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'),
nullable=False)
task_done = db.Column(db.Boolean, default=False)
task_uuid = db.Column(db.String(36))
#: The :class:`Sample` this set of :class:`Region`s belong to.
sample = db.relationship(Sample,
backref=db.backref('coverages', lazy='dynamic',
cascade='all, delete-orphan',
passive_deletes=True))
#: The :class:`DataSource` this set of :class:`Region`s are imported from.
data_source = db.relationship(DataSource,
backref=db.backref('coverages',
lazy='dynamic'))
def __init__(self, sample, data_source):
self.sample = sample
self.data_source = data_source
@detached_session_fix
def __repr__(self):
return '<Coverage task_done=%r, task_uuid=%r>' % (self.task_done,
self.task_uuid)
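# Association table for the many-to-many relationship between annotations
# and the samples whose observation frequencies they include.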
sample_frequency = db.Table(
'sample_frequency', db.Model.metadata,
db.Column('annotation_id', db.Integer,
db.ForeignKey('annotation.id', ondelete='CASCADE'),
nullable=False),
db.Column('sample_id', db.Integer,
db.ForeignKey('sample.id', ondelete='CASCADE'),
nullable=False))
class Annotation(db.Model):
"""
Annotation of a data source.
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
original_data_source_id = db.Column(db.Integer,
db.ForeignKey('data_source.id'),
nullable=False)
annotated_data_source_id = db.Column(db.Integer,
db.ForeignKey('data_source.id'),
nullable=False)
task_done = db.Column(db.Boolean, default=False)
task_uuid = db.Column(db.String(36))
#: Set to `True` iff global observation frequencies are annotated.
global_frequency = db.Column(db.Boolean)
#: A link to each :class:`Sample` for which observation frequencies are
#: annotated.
sample_frequency = db.relationship(Sample, secondary=sample_frequency,
cascade='all', passive_deletes=True)
#: Query field for groups. Should be a list of dictionaries, serialized by pickle;
#: e.g. [{'group1': False, 'group2': True}, {'group1': True, 'group2': False}]
group_query = db.Column(db.PickleType)
#: The original :class:`DataSource` that is being annotated.
original_data_source = db.relationship(
DataSource,
primaryjoin='DataSource.id==Annotation.original_data_source_id',
backref=db.backref('annotations', lazy='dynamic'))
#: The annotated :class:`DataSource` data source.
annotated_data_source = db.relationship(
DataSource,
primaryjoin='DataSource.id==Annotation.annotated_data_source_id',
backref=db.backref('annotation', uselist=False, lazy='select'))
def __init__(self, original_data_source, annotated_data_source,
global_frequency=True, sample_frequency=None, group_query=None):
sample_frequency = sample_frequency or []
self.original_data_source = original_data_source
self.annotated_data_source = annotated_data_source
self.global_frequency = global_frequency
self.sample_frequency = sample_frequency
self.group_query = group_query
@detached_session_fix
def __repr__(self):
return '<Annotation task_done=%r, task_uuid=%r>' % (self.task_done,
self.task_uuid)
class Observation(db.Model):
"""
Observation of a variant in a sample (one or more individuals).
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
variation_id = db.Column(db.Integer,
db.ForeignKey('variation.id', ondelete='CASCADE'),
index=True, nullable=False)
#: Reference genome chromosome name.
chromosome = db.Column(db.String(30))
#: Position is one-based, and defines where :attr:`reference` and
#: :attr:`observed` start on the reference genome.
position = db.Column(db.Integer)
# Todo: Should we perhaps also store the end position? Would make it
# easier to query for variants overlapping some position. Perhaps it's
# enough to have a computed index for len(reference)?
#: Reference sequence, can be empty for an insertion.
reference = db.Column(db.String(200))
#: Observed sequence, can be empty for a deletion.
observed = db.Column(db.String(200))
#: Bin index that can be used for faster range-limited querying. See the
#: :mod:`region_binning` module for more information.
#:
#: .. note:: Bin indices are always calculated on non-empty ranges, so for
#: an insertion we (somewhat arbitrarily) choose the first base next
#: to it as its range, although technically it spans only the empty
#: range.
bin = db.Column(db.Integer)
#: Zygosity can be any of the values in :data:`OBSERVATION_ZYGOSITIES`, or
#: `None` (meaning that the exact genotype is unknown, but the variant
#: allele was observed).
zygosity = db.Column(db.Enum(*OBSERVATION_ZYGOSITIES, name='zygosity'))
#: Number of individuals the variant was observed in.
support = db.Column(db.Integer)
#: The :class:`Variation` linking this observation to a :class:`Sample`
#: and a :class:`DataSource`.
variation = db.relationship(Variation,
backref=db.backref('observations',
lazy='dynamic',
cascade='all, delete-orphan',
passive_deletes=True))
def __init__(self, variation, chromosome, position, reference, observed,
zygosity=None, support=1):
self.variation = variation
self.chromosome = chromosome
self.position = position
self.reference = reference
self.observed = observed
# We choose the 'region' of the reference covered by an insertion to
# be the base next to it.
self.bin = assign_bin(self.position,
self.position + max(1, len(self.reference)) - 1)
self.zygosity = zygosity
self.support = support
@detached_session_fix
def __repr__(self):
return '<Observation chromosome=%r, position=%r, reference=%r, ' \
'observed=%r, zygosity=%r, support=%r>' \
% (self.chromosome, self.position, self.reference, self.observed,
self.zygosity, self.support)
def is_deletion(self):
"""
Return `True` iff this observation is a deletion.
"""
return self.observed == ''
def is_insertion(self):
"""
Return `True` iff this observation is an insertion.
"""
return self.reference == ''
def is_snv(self):
"""
Return `True` iff this observation is a single nucleotide variant.
"""
return len(self.observed) == len(self.reference) == 1
def is_indel(self):
"""
Return `True` iff this observation is neither a deletion, insertion,
or single nucleotide variant.
"""
return not (self.is_deletion() or
self.is_insertion() or
self.is_snv())
Index('observation_location',
Observation.bin, Observation.chromosome, Observation.position)
class Region(db.Model):
"""
Covered region for variant calling in a sample (one or more individuals).
"""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
id = db.Column(db.Integer, primary_key=True)
coverage_id = db.Column(db.Integer,
db.ForeignKey('coverage.id', ondelete='CASCADE'),
index=True, nullable=False)
#: Reference genome chromosome name.
chromosome = db.Column(db.String(30))
#: Begin of the region, one-based and inclusive.
begin = db.Column(db.Integer)
#: End of the region, one-based and inclusive.
end = db.Column(db.Integer)
#: Bin index that can be used for faster range-limited querying. See the
#: :mod:`region_binning` module for more information.
bin = db.Column(db.Integer)
# Todo: Perhaps we might want to have a `support` column here similar to
# the Observation model? It only makes sense if we accept BED files
# with a `support` integer for each region.
#: The :class:`Coverage` linking this observation to a :class:`Sample` and
#: a :class:`DataSource`.
coverage = db.relationship(Coverage,
backref=db.backref('regions', lazy='dynamic',
cascade='all, delete-orphan',
passive_deletes=True))
def __init__(self, coverage, chromosome, begin, end):
self.coverage = coverage
self.chromosome = chromosome
self.begin = begin
self.end = end
self.bin = assign_bin(self.begin, self.end)
@detached_session_fix
def __repr__(self):
return '<Region chromosome=%r, begin=%r, end=%r>' \
% (self.chromosome, self.begin, self.end)
Index('region_location',
Region.bin, Region.chromosome, Region.begin)
| mit | 7,266,825,592,961,211,000 | 34.913043 | 94 | 0.588949 | false | 4.066184 | true | false | false |
raulperula/uco_student | iaic/practices/pyclassify/ayuda_ui.py | 1 | 1234 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Raul Perula-Martinez"
__email__ = "raul.perula@uc3m.es"
__date__ = "2014-11"
__license__ = "GPL v3"
__version__ = "1.0.0"
# File: ayuda_ui.py
"""
CommonKADS reference architecture.
Application Model.
This module contains the interface for the application's help window
and the handling of its events.
"""
__author__ = "Manuel Pedrero Luque <i62pelum@uco.es>"
__author__ = "Raul Perula Martinez <i62pemar@uco.es>"
__author__ = "Miguel Angel Sanchez Muñoz <i52samum@uco.es>"
__date__ = "01 de julio 2010"
__version__ = "$ Revision: 1 $"
__credits__ = """Universidad de Cordoba"""
import sys
from PyQt4 import QtCore, QtGui
from PyQt4.QtGui import *
from ayuda import Ui_ayuda_form
# Help window class; handles the events of the interface
class AyudaWidget(QtGui.QWidget):
def __init__(self, parent=None):
'''Shows the window with the help viewer
@return: Nothing
'''
QtGui.QWidget.__init__(self, parent)
self.ui = Ui_ayuda_form()
self.ui.setupUi(self)
# Reload the help page
self.ui.ayuda_textBrowser.reload();
# Event handling
# If the accept button is pressed, the window is closed
| gpl-3.0 | 394,239,549,965,202,700 | 23.176471 | 72 | 0.67721 | false | 2.511202 | false | false | false |
lu18887/perhapsgeekblog | perhapsgeek/zinnia/tests/test_admin_widgets.py | 4 | 2080 | # coding=utf-8
"""Test cases for Zinnia's admin widgets"""
from django.test import TestCase
from django.utils.encoding import smart_text
from zinnia.admin.widgets import MPTTFilteredSelectMultiple
class MPTTFilteredSelectMultipleTestCase(TestCase):
def test_render_option(self):
widget = MPTTFilteredSelectMultiple('test', False)
option = widget.render_option([], 1, 'Test', (4, 5))
self.assertEqual(
option,
'<option value="1" data-tree-id="4"'
' data-left-value="5">Test</option>')
option = widget.render_option(['0', '1', '2'], 1, 'Test', (4, 5))
self.assertEqual(
option,
'<option value="1" selected="selected" data-tree-id="4"'
' data-left-value="5">Test</option>')
def test_render_option_non_ascii_issue_317(self):
widget = MPTTFilteredSelectMultiple('test', False)
option = widget.render_option([], 1, 'тест', (1, 1))
self.assertEqual(
option,
smart_text('<option value="1" data-tree-id="1"'
' data-left-value="1">тест</option>'))
def test_render_options(self):
widget = MPTTFilteredSelectMultiple('test', False)
self.assertEqual(widget.render_options([], []), '')
options = widget.render_options([
(1, 'Category 1', (1, 1)),
(2, '|-- Category 2', (1, 2))], [])
self.assertEqual(
options,
'<option value="1" data-tree-id="1" data-left-value="1">'
'Category 1</option>\n<option value="2" data-tree-id="1" '
'data-left-value="2">|-- Category 2</option>')
options = widget.render_options([
(1, 'Category 1', (1, 1)),
(2, '|-- Category 2', (1, 2))], [2])
self.assertEqual(
options,
'<option value="1" data-tree-id="1" data-left-value="1">'
'Category 1</option>\n<option value="2" selected="selected" '
'data-tree-id="1" data-left-value="2">|-- Category 2</option>')
| mit | 7,590,448,066,463,518,000 | 33.533333 | 75 | 0.553571 | false | 3.667257 | true | false | false |
mozilla/kitsune | kitsune/products/views.py | 1 | 3266 | import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
from product_details import product_details
from kitsune.products.models import Product, Topic
from kitsune.wiki.decorators import check_simple_wiki_locale
from kitsune.wiki.facets import documents_for, topics_for
from kitsune.wiki.utils import get_featured_articles
@check_simple_wiki_locale
def product_list(request):
"""The product picker page."""
template = "products/products.html"
products = Product.objects.filter(visible=True)
return render(request, template, {"products": products})
@check_simple_wiki_locale
def product_landing(request, slug):
"""The product landing page."""
product = get_object_or_404(Product, slug=slug)
user = request.user
template = "products/product.html"
if request.is_ajax():
# Return a list of topics/subtopics for the product
topic_list = list()
for t in Topic.objects.filter(product=product, visible=True):
topic_list.append({"id": t.id, "title": t.title})
return HttpResponse(json.dumps({"topics": topic_list}), content_type="application/json")
if slug == "firefox":
latest_version = product_details.firefox_versions["LATEST_FIREFOX_VERSION"]
else:
versions = product.versions.filter(default=True)
if versions:
latest_version = versions[0].min_version
else:
latest_version = 0
return render(
request,
template,
{
"product": product,
"products": Product.objects.filter(visible=True),
"topics": topics_for(product=product, parent=None),
"search_params": {"product": slug},
"latest_version": latest_version,
"subscribed_products_ids": (
user.profile.products.all().values_list("id", flat=True)
if user.is_authenticated
else []
),
"featured": get_featured_articles(product, locale=request.LANGUAGE_CODE),
},
)
@check_simple_wiki_locale
def document_listing(request, product_slug, topic_slug, subtopic_slug=None):
"""The document listing page for a product + topic."""
product = get_object_or_404(Product, slug=product_slug)
topic = get_object_or_404(Topic, slug=topic_slug, product=product, parent__isnull=True)
template = "products/documents.html"
doc_kw = {"locale": request.LANGUAGE_CODE, "products": [product]}
if subtopic_slug is not None:
subtopic = get_object_or_404(Topic, slug=subtopic_slug, product=product, parent=topic)
doc_kw["topics"] = [subtopic]
else:
subtopic = None
doc_kw["topics"] = [topic]
documents, fallback_documents = documents_for(**doc_kw)
return render(
request,
template,
{
"product": product,
"topic": topic,
"subtopic": subtopic,
"topics": topics_for(product=product, parent=None),
"subtopics": topics_for(product=product, parent=topic),
"documents": documents,
"fallback_documents": fallback_documents,
"search_params": {"product": product_slug},
},
)
| bsd-3-clause | -1,201,585,128,838,793,500 | 33.744681 | 96 | 0.631966 | false | 3.978076 | true | false | false |