Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: ArrowNotImplementedError
Message: Cannot write struct type 'file' with no child field to Parquet. Consider adding a dummy child field.
Traceback: Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1831, in _prepare_split_single
writer.write_table(table)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 712, in write_table
self._build_writer(inferred_schema=pa_table.schema)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 757, in _build_writer
self.pa_writer = pq.ParquetWriter(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/pyarrow/parquet/core.py", line 1070, in __init__
self.writer = _parquet.ParquetWriter(
^^^^^^^^^^^^^^^^^^^^^^^
File "pyarrow/_parquet.pyx", line 2363, in pyarrow._parquet.ParquetWriter.__cinit__
File "pyarrow/error.pxi", line 155, in pyarrow.lib.pyarrow_internal_check_status
File "pyarrow/error.pxi", line 92, in pyarrow.lib.check_status
pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'file' with no child field to Parquet. Consider adding a dummy child field.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1847, in _prepare_split_single
num_examples, num_bytes = writer.finalize()
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 731, in finalize
self._build_writer(self.schema)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 757, in _build_writer
self.pa_writer = pq.ParquetWriter(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/pyarrow/parquet/core.py", line 1070, in __init__
self.writer = _parquet.ParquetWriter(
^^^^^^^^^^^^^^^^^^^^^^^
File "pyarrow/_parquet.pyx", line 2363, in pyarrow._parquet.ParquetWriter.__cinit__
File "pyarrow/error.pxi", line 155, in pyarrow.lib.pyarrow_internal_check_status
File "pyarrow/error.pxi", line 92, in pyarrow.lib.check_status
pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'file' with no child field to Parquet. Consider adding a dummy child field.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1455, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1054, in convert_to_parquet
builder.download_and_prepare(
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 894, in download_and_prepare
self._download_and_prepare(
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 970, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1702, in _prepare_split
for job_id, done, content in self._prepare_split_single(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1858, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the datasetNeed help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
before
string | actions
list | language
string | meta
dict |
|---|---|---|---|
import os
import datetime
import pytest
from doit import tools
from doit import task
def test_create_folder():
def rm_dir():
if os.path.exists(DIR_DEP):
os.removedirs(DIR_DEP)
DIR_DEP = os.path.join(os.path.dirname(__file__),"parent/child/")
rm_dir()
assert True == tools.create_folder(DIR_DEP)
assert os.path.exists(DIR_DEP)
rm_dir()
class TestTitleWithActions(object):
def test_actions(self):
t = task.Task("MyName",["MyAction"], title=tools.title_with_actions)
assert "MyName => Cmd: MyAction" == t.title()
def test_group(self):
t = task.Task("MyName", None, file_dep=['file_foo'],
task_dep=['t1','t2'], title=tools.title_with_actions)
assert "MyName => Group: t1, t2" == t.title()
class TestRunOnce(object):
def test_run(self):
t = task.Task("TaskX", None, uptodate=[tools.run_once])
assert False == tools.run_once(t, t.values)
t.execute()
assert True == tools.run_once(t, t.values)
class TestConfigChanged(object):\
def test_invalid_type(self):
class NotValid(object):pass
uptodate = tools.config_changed(NotValid())
pytest.raises(Exception, uptodate, None, None)
def test_string(self):
ua = tools.config_changed('a')
ub = tools.config_changed('b')
t1 = task.Task("TaskX", None, uptodate=[ua])
assert False == ua(t1, t1.values)
assert False == ub(t1, t1.values)
t1.execute()
assert True == ua(t1, t1.values)
assert False == ub(t1, t1.values)
def test_dict(self):
ua = tools.config_changed({'x':'a', 'y':1})
ub = tools.config_changed({'x':'b', 'y':1})
t1 = task.Task("TaskX", None, uptodate=[ua])
assert False == ua(t1, t1.values)
assert False == ub(t1, t1.values)
t1.execute()
assert True == ua(t1, t1.values)
assert False == ub(t1, t1.values)
class TestTimeout(object):
def test_invalid(self):
pytest.raises(Exception, tools.timeout, "abc")
def test_int(self, monkeypatch):
monkeypatch.setattr(tools.time, 'time', lambda: 100)
t = task.Task("TaskX", None, uptodate=[tools.timeout(5)])
assert False == t.uptodate[0](t, t.values)
t.execute()
assert 100 == t.values['success-time']
monkeypatch.setattr(tools.time, 'time', lambda: 103)
assert True == t.uptodate[0](t, t.values)
monkeypatch.setattr(tools.time, 'time', lambda: 106)
assert False == t.uptodate[0](t, t.values)
def test_timedelta(self, monkeypatch):
monkeypatch.setattr(tools.time, 'time', lambda: 10)
limit = datetime.timedelta(minutes=2)
t = task.Task("TaskX", None, uptodate=[tools.timeout(limit)])
assert False == t.uptodate[0](t, t.values)
t.execute()
assert 10 == t.values['success-time']
monkeypatch.setattr(tools.time, 'time', lambda: 100)
assert True == t.uptodate[0](t, t.values)
monkeypatch.setattr(tools.time, 'time', lambda: 200)
assert False == t.uptodate[0](t, t.values)
def test_timedelta_big(self, monkeypatch):
monkeypatch.setattr(tools.time, 'time', lambda: 10)
limit = datetime.timedelta(days=2, minutes=5)
t = task.Task("TaskX", None, uptodate=[tools.timeout(limit)])
assert False == t.uptodate[0](t, t.values)
t.execute()
assert 10 == t.values['success-time']
monkeypatch.setattr(tools.time, 'time', lambda: 3600 * 30)
assert True == t.uptodate[0](t, t.values)
monkeypatch.setattr(tools.time, 'time', lambda: 3600 * 49)
assert False == t.uptodate[0](t, t.values)
class TestCheckTimestampUnchanged(object):
def patch_os_stat(self, monkeypatch, fake_path,
st_mode=33188, st_ino=402886990, st_dev=65024L,
st_nlink=1, st_uid=0, st_gid=0, st_size=0,
st_atime=1317297141, st_mtime=1317297140,
st_ctime=1317297141):
"""helper to patch os.stat for one specific path."""
real_stat = os.stat
def fake_stat(path):
if path == fake_path:
return os.stat_result((st_mode, st_ino, st_dev, st_nlink,
st_uid, st_gid, st_size,
st_atime, st_mtime, st_ctime))
else:
return real_stat(path)
monkeypatch.setattr(os, 'stat', fake_stat)
# @todo: maybe parametrize test_atime, test_ctime, test_mtime and
# test_op_custom? a lot of repetition there
def test_atime(self, monkeypatch):
check = tools.check_timestamp_unchanged('check_atime', 'atime')
self.patch_os_stat(monkeypatch, 'check_atime', st_atime=1317460678)
t = task.Task("TaskX", None, uptodate=[check])
# no stored value/first run
assert False == check(t, t.values)
# value just stored
t.execute()
assert True == check(t, t.values)
# file has changed, should now re-execute
monkeypatch.undo()
self.patch_os_stat(monkeypatch, 'check_atime', st_atime=1317470015)
assert False == check(t, t.values)
def test_ctime(self, monkeypatch):
check = tools.check_timestamp_unchanged('check_ctime', 'ctime')
self.patch_os_stat(monkeypatch, 'check_ctime', st_ctime=1317460678)
t = task.Task("TaskX", None, uptodate=[check])
# no stored value/first run
assert False == check(t, t.values)
# value just stored
t.execute()
assert True == check(t, t.values)
# file has changed, should now re-execute
monkeypatch.undo()
self.patch_os_stat(monkeypatch, 'check_ctime', st_ctime=1317470015)
assert False == check(t, t.values)
def test_mtime(self, monkeypatch):
check = tools.check_timestamp_unchanged('check_mtime', 'mtime')
self.patch_os_stat(monkeypatch, 'check_mtime', st_mtime=1317460678)
t = task.Task("TaskX", None, uptodate=[check])
# no stored value/first run
assert False == check(t, t.values)
# value just stored
t.execute()
assert True == check(t, t.values)
# file has changed, should now re-execute
monkeypatch.undo()
self.patch_os_stat(monkeypatch, 'check_mtime', st_mtime=1317470015)
assert False == check(t, t.values)
def test_invalid_time(self):
with pytest.raises(ValueError):
tools.check_timestamp_unchanged('check_invalid_time', 'foo')
def test_file_missing(self):
# @todo: do we need to distinguish between file gone missing (e.g.
# prev_time is valid but current_time not) and the case where
# we never seen the file (neither prev_time nor current_time
# valid); the latter could e.g. be a typo by the user
# no such file at all
check = tools.check_timestamp_unchanged('no_such_file')
t = task.Task("TaskX", None, uptodate=[check])
assert False == check(t, t.values)
# file gone missing
self.patch_os_stat(monkeypatch, 'file_missing', st_ctime=1317460678)
check = tools.check_timestamp_unchanged('file_missing')
t = task.Task("TaskX", None, uptodate=[check])
t.execute()
assert True == check(t, t.values)
monkeypatch.undo()
assert False == check(t, t.values)
def test_op_gt(self):
pass
def test_op_gt_file_missing(self):
pass
def test_op_custom(self):
pass
|
[
{
"type": "go_to",
"params": {
"line": 2,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "import operator\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 138,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 138,
"column": 0
},
"end": {
"line": 139,
"column": 56
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " # @",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "todo: maybe pa",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 138,
"column": 11,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 138,
"column": 21,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "rametrize test_atime, test_ctime ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 138,
"column": 49,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 138,
"column": 54,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "and test_mtime? a lot of\n # ",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 139,
"column": 11,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " repetition there\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 196,
"column": 30,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 196,
"column": 30
},
"end": {
"line": 196,
"column": 30
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ", monkeypatch",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 216,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 216,
"column": 0
},
"end": {
"line": 217,
"column": 13
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " def test_op_gt(self, monkeypatch):\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " self.p",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 217,
"column": 4,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 217,
"column": 14,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "atch_os_stat(monkeypatch, 'check_gt', st_mtime=1317",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 217,
"column": 60,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 217,
"column": 65,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "460678)\n check = tools.check_timestamp_unchanged('check_gt', op=operator.gt)\n ",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 219,
"column": 6,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " t = task.Task(\"Task",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "X\", None, u",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 219,
"column": 36
},
"end": {
"line": 219,
"column": 38
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " u",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ptodate=[check])\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 219,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 219,
"column": 0
},
"end": {
"line": 220,
"column": 13
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " # no stored ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "value/first run\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " assert False == ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "check(t, t.values)\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 222,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 222,
"column": 0
},
"end": {
"line": 223,
"column": 13
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " # value just stored is not greater",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " than itself\n t.execute()\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 224,
"column": 3,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 224,
"column": 7,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " assert False == chec",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "k(t, t.values)\n\n # file t",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 226,
"column": 14,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 226,
"column": 16,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "imestamp greater than stored, up to ",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 226,
"column": 52,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "date\n monkeypatch.",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "undo()\n self.p",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "atch_os_stat(monkeypatch, 'c",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "heck_gt', st_mtime=13",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 228,
"column": 59,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 228,
"column": 63,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "17470015)\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " assert True == check(t, t.value",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 229,
"column": 39,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "s)\n\n def test_op_bad_cust",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "om(self, monkeypatch):\n # handling mis",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "behaving custo",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "m operators\n ",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 233,
"column": 5,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " def bad_",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 233,
"column": 7,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 233,
"column": 16,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "op(prev_time",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ", current_time):\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 234,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 234,
"column": 1,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " raise Exce",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 234,
"column": 22,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ption('oops')\n\n self.patch_",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "os_stat(monkeypatch, 'check_bad', st_mtim",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "e=1317460678)\n check",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 237,
"column": 11,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 237,
"column": 13,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " = tools.chec",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "k_timestamp_unchanged('check_bad', o",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 237,
"column": 55,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 237,
"column": 62,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "p=bad_op)\n t = task.Task(\"TaskX\", None, uptodate=[check])\n\n # if unsure opt for out-of-date\n assert",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 241,
"column": 12,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 241,
"column": 14,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 241,
"column": 14,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " False == che",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ck(t, t.values)\n t.execute()\n asse",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "rt False == check(t, t",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ".values)\n",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "Hinidu/doit,wangpanjun/doit,gh0std4ncer/doit,lelit/doit,JohannesBuchner/doit,pydoit/doit,saimn/doit,lelit/doit,lelit/doit,lelit/doit",
"status": "M",
"commit": "4553d0098f8564a5b650e664760b49e2c1dc50b0",
"parent": null,
"path": "tests/test_tools.py",
"old_path": "tests/test_tools.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "Added tests for operators in `check_timestamp_unchanged`.\nA gt (>) operator as an example and a misbehaving custom operator.\nAlso added a missing parameter to file_missing test :P\nDecided that testing a custom op would be really testing the implementation of the operator so will only test a misbehaving op for now. Also file_missing with gt op doesn't add anything.\n\n--HG--\nextra : convert_revision : tzeentch.gm%40gmail.com-20111001201631-11y7acpu0rt54jpw\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
# Copyright (c) 2015-2016 Vector 35 LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import ctypes
import traceback
# Binary Ninja components
import _binaryninjacore as core
from enums import FormInputFieldType, MessageBoxIcon, MessageBoxButtonResult
import binaryview
import log
class LabelField(object):
def __init__(self, text):
self.text = text
def _fill_core_struct(self, value):
value.type = FormInputFieldType.LabelFormField
value.prompt = self.text
def _fill_core_result(self, value):
pass
def _get_result(self, value):
pass
class SeparatorField(object):
def _fill_core_struct(self, value):
value.type = FormInputFieldType.SeparatorFormField
def _fill_core_result(self, value):
pass
def _get_result(self, value):
pass
class TextLineField(object):
def __init__(self, prompt):
self.prompt = prompt
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.TextLineFormField
value.prompt = self.prompt
def _fill_core_result(self, value):
value.stringResult = core.BNAllocString(str(self.result))
def _get_result(self, value):
self.result = value.stringResult
class MultilineTextField(object):
def __init__(self, prompt):
self.prompt = prompt
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.MultilineTextFormField
value.prompt = self.prompt
def _fill_core_result(self, value):
value.stringResult = core.BNAllocString(str(self.result))
def _get_result(self, value):
self.result = value.stringResult
class IntegerField(object):
def __init__(self, prompt):
self.prompt = prompt
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.IntegerFormField
value.prompt = self.prompt
def _fill_core_result(self, value):
value.intResult = self.result
def _get_result(self, value):
self.result = value.intResult
class AddressField(object):
def __init__(self, prompt, view = None, current_address = 0):
self.prompt = prompt
self.view = view
self.current_address = current_address
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.AddressFormField
value.prompt = self.prompt
value.view = None
if self.view is not None:
value.view = self.view.handle
value.currentAddress = self.current_address
def _fill_core_result(self, value):
value.addressResult = self.result
def _get_result(self, value):
self.result = value.addressResult
class ChoiceField(object):
def __init__(self, prompt, choices):
self.prompt = prompt
self.choices = choices
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.ChoiceFormField
value.prompt = self.prompt
choice_buf = (ctypes.c_char_p * len(self.choices))()
for i in xrange(0, len(self.choices)):
choice_buf[i] = str(self.choices[i])
value.choices = choice_buf
value.count = len(self.choices)
def _fill_core_result(self, value):
value.indexResult = self.result
def _get_result(self, value):
self.result = value.indexResult
class OpenFileNameField(object):
def __init__(self, prompt, ext = ""):
self.prompt = prompt
self.ext = ext
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.OpenFileNameFormField
value.prompt = self.prompt
value.ext = self.ext
def _fill_core_result(self, value):
value.stringResult = core.BNAllocString(str(self.result))
def _get_result(self, value):
self.result = value.stringResult
class SaveFileNameField(object):
def __init__(self, prompt, ext = "", default_name = ""):
self.prompt = prompt
self.ext = ext
self.default_name = default_name
self.result = None
def _fill_core_struct(self, value):
value.type = FormInputFieldType.SaveFileNameFormField
value.prompt = self.prompt
value.ext = self.ext
value.defaultName = self.default_name
def _fill_core_result(self, value):
value.stringResult = core.BNAllocString(str(self.result))
def _get_result(self, value):
self.result = value.stringResult
class DirectoryNameField(object):
def __init__(self, prompt, default_name = ""):
self.prompt = prompt
self.default_name = default_name
self.result = None
def _fill_core_struct(self, value):
value.type = DirectoryNameField
value.prompt = self.prompt
value.defaultName = self.default_name
def _fill_core_result(self, value):
value.stringResult = core.BNAllocString(str(self.result))
def _get_result(self, value):
self.result = value.stringResult
class InteractionHandler(object):
_interaction_handler = None
def __init__(self):
self._cb = core.BNInteractionHandlerCallbacks()
self._cb.context = 0
self._cb.showPlainTextReport = self._cb.showPlainTextReport.__class__(self._show_plain_text_report)
self._cb.showMarkdownReport = self._cb.showMarkdownReport.__class__(self._show_markdown_report)
self._cb.showHTMLReport = self._cb.showHTMLReport.__class__(self._show_html_report)
self._cb.getTextLineInput = self._cb.getTextLineInput.__class__(self._get_text_line_input)
self._cb.getIntegerInput = self._cb.getIntegerInput.__class__(self._get_int_input)
self._cb.getAddressInput = self._cb.getAddressInput.__class__(self._get_address_input)
self._cb.getChoiceInput = self._cb.getChoiceInput.__class__(self._get_choice_input)
self._cb.getOpenFileNameInput = self._cb.getOpenFileNameInput.__class__(self._get_open_filename_input)
self._cb.getSaveFileNameInput = self._cb.getSaveFileNameInput.__class__(self._get_save_filename_input)
self._cb.getDirectoryNameInput = self._cb.getDirectoryNameInput.__class__(self._get_directory_name_input)
self._cb.getFormInput = self._cb.getFormInput.__class__(self._get_form_input)
self._cb.showMessageBox = self._cb.showMessageBox.__class__(self._show_message_box)
def register(self):
self.__class__._interaction_handler = self
core.BNRegisterInteractionHandler(self._cb)
def _show_plain_text_report(self, ctxt, view, title, contents):
try:
if view:
view = binaryview.BinaryView(handle = core.BNNewViewReference(view))
else:
view = None
self.show_plain_text_report(view, title, contents)
except:
log.log_error(traceback.format_exc())
def _show_markdown_report(self, ctxt, view, title, contents, plaintext):
try:
if view:
view = binaryview.BinaryView(handle = core.BNNewViewReference(view))
else:
view = None
self.show_markdown_report(view, title, contents, plaintext)
except:
log.log_error(traceback.format_exc())
def _show_html_report(self, ctxt, view, title, contents, plaintext):
try:
if view:
view = binaryview.BinaryView(handle = core.BNNewViewReference(view))
else:
view = None
self.show_html_report(view, title, contents, plaintext)
except:
log.log_error(traceback.format_exc())
def _get_text_line_input(self, ctxt, result, prompt, title):
try:
value = self.get_text_line_input(prompt, title)
if value is None:
return False
result[0] = core.BNAllocString(str(value))
return True
except:
log.log_error(traceback.format_exc())
def _get_int_input(self, ctxt, result, prompt, title):
try:
value = self.get_int_input(prompt, title)
if value is None:
return False
result[0] = value
return True
except:
log.log_error(traceback.format_exc())
def _get_address_input(self, ctxt, result, prompt, title, view, current_address):
try:
if view:
view = binaryview.BinaryView(handle = core.BNNewViewReference(view))
else:
view = None
value = self.get_address_input(prompt, title, view, current_address)
if value is None:
return False
result[0] = value
return True
except:
log.log_error(traceback.format_exc())
def _get_choice_input(self, ctxt, result, prompt, title, choice_buf, count):
try:
choices = []
for i in xrange(0, count):
choices.append(choice_buf[i])
value = self.get_choice_input(prompt, title, choices)
if value is None:
return False
result[0] = value
return True
except:
log.log_error(traceback.format_exc())
def _get_open_filename_input(self, ctxt, result, prompt, ext):
try:
value = self.get_open_filename_input(prompt, ext)
if value is None:
return False
result[0] = core.BNAllocString(str(value))
return True
except:
log.log_error(traceback.format_exc())
def _get_save_filename_input(self, ctxt, result, prompt, ext, default_name):
try:
value = self.get_save_filename_input(prompt, ext, default_name)
if value is None:
return False
result[0] = core.BNAllocString(str(value))
return True
except:
log.log_error(traceback.format_exc())
def _get_directory_name_input(self, ctxt, result, prompt, default_name):
try:
value = self.get_directory_name_input(prompt, default_name)
if value is None:
return False
result[0] = core.BNAllocString(str(value))
return True
except:
log.log_error(traceback.format_exc())
def _get_form_input(self, ctxt, fields, count, title):
try:
field_objs = []
for i in xrange(0, count):
if fields[i].type == FormInputFieldType.LabelFormField:
field_objs.append(LabelField(fields[i].prompt))
elif fields[i].type == FormInputFieldType.SeparatorFormField:
field_objs.append(SeparatorField())
elif fields[i].type == FormInputFieldType.TextLineFormField:
field_objs.append(TextLineField(fields[i].prompt))
elif fields[i].type == FormInputFieldType.MultilineTextFormField:
field_objs.append(MultilineTextField(fields[i].prompt))
elif fields[i].type == FormInputFieldType.IntegerFormField:
field_objs.append(IntegerField(fields[i].prompt))
elif fields[i].type == FormInputFieldType.AddressFormField:
view = None
if fields[i].view:
view = binaryview.BinaryView(handle = core.BNNewViewReference(fields[i].view))
field_objs.append(AddressField(fields[i].prompt, view, fields[i].currentAddress))
elif fields[i].type == FormInputFieldType.ChoiceFormField:
choices = []
for i in xrange(0, fields[i].count):
choices.append(fields[i].choices[i])
field_objs.append(ChoiceField(fields[i].prompt, choices))
elif fields[i].type == FormInputFieldType.OpenFileNameFormField:
field_objs.append(OpenFileNameField(fields[i].prompt, fields[i].ext))
elif fields[i].type == FormInputFieldType.SaveFileNameFormField:
field_objs.append(SaveFileNameField(fields[i].prompt, fields[i].ext, fields[i].defaultName))
elif fields[i].type == DirectoryNameField:
field_objs.append(DirectoryNameField(fields[i].prompt, fields[i].defaultName))
else:
field_objs.append(LabelField(fields[i].prompt))
if not self.get_form_input(field_objs, title):
return False
for i in xrange(0, count):
field_objs[i]._fill_core_result(fields[i])
return True
except:
log.log_error(traceback.format_exc())
def _show_message_box(self, ctxt, title, text, buttons, icon):
try:
return self.show_message_box(title, text, buttons, icon)
except:
log.log_error(traceback.format_exc())
def show_plain_text_report(self, view, title, contents):
pass
def show_markdown_report(self, view, title, contents, plaintext):
self.show_html_report(view, title, markdown_to_html(contents), plaintext)
def show_html_report(self, view, title, contents, plaintext):
if len(plaintext) != 0:
self.show_plain_text_report(view, title, plaintext)
def get_text_line_input(self, prompt, title):
return None
def get_int_input(self, prompt, title):
while True:
text = self.get_text_line_input(prompt, title)
if len(text) == 0:
return False
try:
return int(text)
except:
continue
def get_address_input(self, prompt, title, view, current_address):
return get_int_input(prompt, title)
def get_choice_input(self, prompt, title, choices):
return None
def get_open_filename_input(self, prompt, ext):
return get_text_line_input(prompt, "Open File")
def get_save_filename_input(self, prompt, ext, default_name):
return get_text_line_input(prompt, "Save File")
def get_directory_name_input(self, prompt, default_name):
return get_text_line_input(prompt, "Select Directory")
def get_form_input(self, fields, title):
return False
def show_message_box(self, title, text, buttons, icon):
return MessageBoxButtonResult.CancelButton
def markdown_to_html(contents):
return core.BNMarkdownToHTML(contents)
def show_plain_text_report(title, contents):
core.BNShowPlainTextReport(None, title, contents)
def show_markdown_report(title, contents, plaintext = ""):
core.BNShowMarkdownReport(None, title, contents, plaintext)
def show_html_report(title, contents, plaintext = ""):
core.BNShowHTMLReport(None, title, contents, plaintext)
def get_text_line_input(prompt, title):
value = ctypes.c_char_p()
if not core.BNGetTextLineInput(value, prompt, title):
return None
result = value.value
core.BNFreeString(ctypes.cast(value, ctypes.POINTER(ctypes.c_byte)))
return result
def get_int_input(prompt, title):
value = ctypes.c_longlong()
if not core.BNGetIntegerInput(value, prompt, title):
return None
return value.value
def get_address_input(prompt, title):
value = ctypes.c_ulonglong()
if not core.BNGetAddressInput(value, prompt, title, None, 0):
return None
return value.value
def get_choice_input(prompt, title, choices):
choice_buf = (ctypes.c_char_p * len(choices))()
for i in xrange(0, len(choices)):
choice_buf[i] = str(choices[i])
value = ctypes.c_ulonglong()
if not core.BNGetChoiceInput(value, prompt, title, choice_buf, len(choices)):
return None
return value.value
def get_open_filename_input(prompt, ext = ""):
value = ctypes.c_char_p()
if not core.BNGetOpenFileNameInput(value, prompt, ext):
return None
result = value.value
core.BNFreeString(ctypes.cast(value, ctypes.POINTER(ctypes.c_byte)))
return result
def get_save_filename_input(prompt, ext = "", default_name = ""):
value = ctypes.c_char_p()
if not core.BNGetSaveFileNameInput(value, prompt, ext, default_name):
return None
result = value.value
core.BNFreeString(ctypes.cast(value, ctypes.POINTER(ctypes.c_byte)))
return result
def get_directory_name_input(prompt, default_name = ""):
value = ctypes.c_char_p()
if not core.BNGetDirectoryNameInput(value, prompt, default_name):
return None
result = value.value
core.BNFreeString(ctypes.cast(value, ctypes.POINTER(ctypes.c_byte)))
return result
def get_form_input(fields, title):
value = (core.BNFormInputField * len(fields))()
for i in xrange(0, len(fields)):
if isinstance(fields[i], str):
LabelField(fields[i])._fill_core_struct(value[i])
elif fields[i] is None:
SeparatorField()._fill_core_struct(value[i])
else:
fields[i]._fill_core_struct(value[i])
if not core.BNGetFormInput(value, len(fields), title):
return False
for i in xrange(0, len(fields)):
if not (isinstance(fields[i], str) or (fields[i] is None)):
fields[i]._get_result(value[i])
core.BNFreeFormInputResults(value, len(fields))
return True
def show_message_box(title, text, buttons = MessageBoxButtonResult.OKButton, icon = MessageBoxIcon.InformationIcon):
return core.BNShowMessageBox(title, text, buttons, icon)
|
[
{
"type": "go_to",
"params": {
"line": 25,
"column": 70,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 25,
"column": 70
},
"end": {
"line": 25,
"column": 70
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "Set, MessageBoxButton",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 519,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 519,
"column": 0
},
"end": {
"line": 519,
"column": 117
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "def show_message_box(title, text, buttons = MessageBoxButtonSet.OKButtonSet, ic",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 519,
"column": 76
},
"end": {
"line": 519,
"column": 79
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " ic",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "on = MessageBoxI",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 519,
"column": 93,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 519,
"column": 95,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "con.InformationI",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "con):\n \"\"\"\n ``show",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "_message_box`` Displays a ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 521,
"column": 32,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 521,
"column": 36,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 521,
"column": 36,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "configurable message box in the UI, or p",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "rompts on the console as appropriate\n re",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "trieves a list ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "of all Symbol objects of",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " the provided symbol ",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 522,
"column": 63
},
"end": {
"line": 522,
"column": 66
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ol ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "type in the option",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 522,
"column": 84,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ally\n provided range.\n",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "\n :param str title: Text tit",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "le for the message box.\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " :param str text: Text",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 526,
"column": 24
},
"end": {
"line": 526,
"column": 25
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "t",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " for the main body of the m",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 526,
"column": 43,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 526,
"column": 52,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 526,
"column": 52,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "essage box.\n :param",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " MessageB",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 527,
"column": 17
},
"end": {
"line": 527,
"column": 19
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "eB",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "oxButtonSet buttons: One ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "of :py:class:`MessageBox",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 527,
"column": 65
},
"end": {
"line": 527,
"column": 68
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "Box",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ButtonSet`\n :param MessageB",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 528,
"column": 15,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 528,
"column": 19,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "oxIcon icon: ",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 528,
"column": 32,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "One of :py:class",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ":`MessageBoxIcon`\n :return: Which b",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "utton was select",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ed\n :rtype: MessageBoxButtonResult\n \"\"\"\n",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api",
"status": "M",
"commit": "12d16ae11dd2c0524a36c59c5b025c38edb4135b",
"parent": null,
"path": "python/interaction.py",
"old_path": "python/interaction.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "add documentation for show_message_box and fix default button parameter\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
from AG_fft_tools import correlate2d
from agpy import gaussfitter
import warnings
import numpy as np
def cross_correlation_shifts_FITS(fitsfile1, fitsfile2, return_cropped_images=False, quiet=True, sigma_cut=False, **kwargs):
"""
Determine the shift between two FITS images using the cross-correlation
technique. Requires montage or hcongrid.
Parameters
----------
fitsfile1: str
Reference fits file name
fitsfile2: str
Offset fits file name
return_cropped_images: bool
Returns the images used for the analysis in addition to the measured
offsets
quiet: bool
Silence messages?
sigma_cut: bool or int
Perform a sigma-cut before cross-correlating the images to minimize
noise correlation?
"""
import montage
try:
import astropy.io.fits as pyfits
import astropy.wcs as pywcs
except ImportError:
import pyfits
import pywcs
import tempfile
header = pyfits.getheader(fitsfile1)
temp_headerfile = tempfile.NamedTemporaryFile()
header.toTxtFile(temp_headerfile.name)
outfile = tempfile.NamedTemporaryFile()
montage.wrappers.reproject(fitsfile2, outfile.name, temp_headerfile.name, exact_size=True, silent_cleanup=quiet)
image2_projected = pyfits.getdata(outfile.name)
image1 = pyfits.getdata(fitsfile1)
outfile.close()
temp_headerfile.close()
if image1.shape != image2_projected.shape:
raise ValueError("montage failed to reproject images to same shape.")
if sigma_cut:
corr_image1 = image1*(image1 > image1.std()*sigma_cut)
corr_image2 = image2_projected*(image2_projected > image2_projected.std()*sigma_cut)
OK = (corr_image1==corr_image1)*(corr_image2==corr_image2)
if (corr_image1[OK]*corr_image2[OK]).sum() == 0:
print "Could not use sigma_cut of %f because it excluded all valid data" % sigma_cut
corr_image1 = image1
corr_image2 = image2_projected
else:
corr_image1 = image1
corr_image2 = image2_projected
verbose = kwargs.pop('verbose') if 'verbose' in kwargs else not quiet
xoff,yoff = cross_correlation_shifts(corr_image1, corr_image2, verbose=verbose,**kwargs)
wcs = pywcs.WCS(header)
try:
xoff_wcs,yoff_wcs = np.inner( np.array([[xoff,0],[0,yoff]]), wcs.wcs.cd )[[0,1],[0,1]]
except AttributeError:
xoff_wcs,yoff_wcs = 0,0
if return_cropped_images:
return xoff,yoff,xoff_wcs,yoff_wcs,image1,image2_projected
else:
return xoff,yoff,xoff_wcs,yoff_wcs
def cross_correlation_shifts(image1, image2, errim1=None, errim2=None,
maxoff=None, verbose=False, gaussfit=False, return_error=False,
**kwargs):
""" Use cross-correlation and a 2nd order taylor expansion to measure the
offset between two images
Given two images, calculate the amount image2 is offset from image1 to
sub-pixel accuracy using 2nd order taylor expansion.
Parameters
----------
image1: np.ndarray
The reference image
image2: np.ndarray
The offset image. Must have the same shape as image1
errim1: np.ndarray [optional]
The pixel-by-pixel error on the reference image
errim2: np.ndarray [optional]
The pixel-by-pixel error on the offset image.
maxoff: int
Maximum allowed offset (in pixels). Useful for low s/n images that you
know are reasonably well-aligned, but might find incorrect offsets due to
edge noise
verbose: bool
Print out extra messages?
gaussfit : bool
Use a Gaussian fitter to fit the peak of the cross-correlation?
return_error: bool
Return an estimate of the error on the shifts. WARNING: I still don't
understand how to make these agree with simulations.
The analytic estimate comes from
http://adsabs.harvard.edu/abs/2003MNRAS.342.1291Z
**kwargs are passed to correlate2d, which in turn passes them to convolve.
The available options include image padding for speed and ignoring NaNs.
References
----------
From http://solarmuri.ssl.berkeley.edu/~welsch/public/software/cross_cor_taylor.pro
Examples
--------
>>> import numpy as np
>>> im1 = np.zeros([10,10])
>>> im2 = np.zeros([10,10])
>>> im1[4,3] = 1
>>> im2[5,5] = 1
>>> import AG_image_tools
>>> yoff,xoff = AG_image_tools.cross_correlation_shifts(im1,im2)
>>> im1_aligned_to_im2 = np.roll(np.roll(im1,int(yoff),1),int(xoff),0)
>>> assert (im1_aligned_to_im2-im2).sum() == 0
"""
if not image1.shape == image2.shape:
raise ValueError("Images must have same shape.")
quiet = kwargs.pop('quiet') if 'quiet' in kwargs else not verbose
ccorr = (correlate2d(image1,image2,quiet=quiet,**kwargs) / image1.size)
# allow for NaNs set by convolve (i.e., ignored pixels)
ccorr[ccorr!=ccorr] = 0
if ccorr.shape != image1.shape:
raise ValueError("Cross-correlation image must have same shape as input images. This can only be violated if you pass a strange kwarg to correlate2d.")
ylen,xlen = image1.shape
xcen = xlen/2-(1-xlen%2)
ycen = ylen/2-(1-xlen%2)
if ccorr.max() == 0:
warnings.warn("WARNING: No signal found! Offset is defaulting to 0,0")
return 0,0
if maxoff is not None:
if verbose: print "Limiting maximum offset to %i" % maxoff
subccorr = ccorr[ycen-maxoff:ycen+maxoff+1,xcen-maxoff:xcen+maxoff+1]
ymax,xmax = np.nonzero(subccorr == subccorr.max())
xmax = xmax+xcen-maxoff
ymax = ymax+ycen-maxoff
else:
ymax,xmax = np.nonzero(ccorr == ccorr.max())
subccorr = ccorr
if return_error:
if errim1 is None:
errim1 = np.ones(ccorr.shape) * image1[image1==image1].std()
if errim2 is None:
errim2 = np.ones(ccorr.shape) * image2[image2==image2].std()
eccorr =( (correlate2d(errim1**2, image2**2,quiet=quiet,**kwargs)+
correlate2d(errim2**2, image1**2,quiet=quiet,**kwargs))**0.5
/ image1.size)
if maxoff is not None:
subeccorr = eccorr[ycen-maxoff:ycen+maxoff+1,xcen-maxoff:xcen+maxoff+1]
else:
subeccorr = eccorr
if gaussfit:
if return_error:
pars,epars = gaussfitter.gaussfit(subccorr,err=subeccorr,return_all=True)
exshift = epars[2]
eyshift = epars[3]
else:
pars,epars = gaussfitter.gaussfit(subccorr,return_all=True)
xshift = maxoff - pars[2] if maxoff is not None else xcen - pars[2]
yshift = maxoff - pars[3] if maxoff is not None else ycen - pars[3]
else:
xshift_int = xmax-xcen
yshift_int = ymax-ycen
local_values = ccorr[ymax-1:ymax+2,xmax-1:xmax+2]
d1y,d1x = np.gradient(local_values)
d2y,d2x,dxy = second_derivative(local_values)
fx,fy,fxx,fyy,fxy = d1x[1,1],d1y[1,1],d2x[1,1],d2y[1,1],dxy[1,1]
shiftsubx=(fyy*fx-fy*fxy)/(fxy**2-fxx*fyy)
shiftsuby=(fxx*fy-fx*fxy)/(fxy**2-fxx*fyy)
xshift = -(xshift_int+shiftsubx)[0]
yshift = -(yshift_int+shiftsuby)[0]
# http://adsabs.harvard.edu/abs/2003MNRAS.342.1291Z
# Zucker error
if return_error:
ccorrn = ccorr / eccorr**2 / ccorr.size #/ (errim1.mean()*errim2.mean()) #/ eccorr**2
print np.min(ccorrn),np.max(ccorrn)
exshift = (np.abs(-1 * ccorrn.size * fxx/ccorrn[ymax,xmax] *
(ccorrn[ymax,xmax]**2/(1-ccorrn[ymax,xmax]**2)))**-0.5) [0]
eyshift = (np.abs(-1 * ccorrn.size * fyy/ccorrn[ymax,xmax] *
(ccorrn[ymax,xmax]**2/(1-ccorrn[ymax,xmax]**2)))**-0.5) [0]
if np.isnan(exshift):
raise ValueError("Error: NAN error!")
if return_error:
return xshift,yshift,exshift,eyshift
else:
return xshift,yshift
def second_derivative(image):
"""
Compute the second derivative of an image
The derivatives are set to zero at the edges
Parameters
----------
image: np.ndarray
Returns
-------
d/dx^2, d/dy^2, d/dxdy
All three are np.ndarrays with the same shape as image.
"""
shift_right = np.roll(image,1,1)
shift_right[:,0] = 0
shift_left = np.roll(image,-1,1)
shift_left[:,-1] = 0
shift_down = np.roll(image,1,0)
shift_down[0,:] = 0
shift_up = np.roll(image,-1,0)
shift_up[-1,:] = 0
shift_up_right = np.roll(shift_up,1,1)
shift_up_right[:,0] = 0
shift_down_left = np.roll(shift_down,-1,1)
shift_down_left[:,-1] = 0
shift_down_right = np.roll(shift_right,1,0)
shift_down_right[0,:] = 0
shift_up_left = np.roll(shift_left,-1,0)
shift_up_left[-1,:] = 0
dxx = shift_right+shift_left-2*image
dyy = shift_up +shift_down-2*image
dxy=0.25*(shift_up_right+shift_down_left-shift_up_left-shift_down_right)
return dxx,dyy,dxy
try:
import pytest
import itertools
from scipy import interpolate
shifts = [1,1.5,-1.25,8.2,10.1]
sizes = [99,100,101]
gaussfits = (True,False)
def make_offset_images(xsh,ysh,imsize, width=3.0, amp=1000.0, noiseamp=1.0,
xcen=50, ycen=50):
image = np.random.randn(imsize,imsize) * noiseamp
Y, X = np.indices([imsize, imsize])
X -= xcen
Y -= ycen
new_r = np.sqrt(X*X+Y*Y)
image += amp*np.exp(-(new_r)**2/(2.*width**2))
tolerance = 3. * 1./np.sqrt(2*np.pi*width**2*amp/noiseamp)
new_image = np.random.randn(imsize,imsize)*noiseamp + amp*np.exp(-((X-xsh)**2+(Y-ysh)**2)/(2.*width**2))
return image, new_image, tolerance
@pytest.mark.parametrize(('xsh','ysh','imsize','gaussfit'),list(itertools.product(shifts,shifts,sizes,gaussfits)))
def test_shifts(xsh,ysh,imsize,gaussfit):
image,new_image,tolerance = make_offset_images(xsh, ysh, imsize)
if gaussfit:
xoff,yoff,exoff,eyoff = cross_correlation_shifts(image,new_image)
print xoff,yoff,np.abs(xoff-xsh),np.abs(yoff-ysh),exoff,eyoff
else:
xoff,yoff = cross_correlation_shifts(image,new_image)
print xoff,yoff,np.abs(xoff-xsh),np.abs(yoff-ysh)
assert np.abs(xoff-xsh) < tolerance
assert np.abs(yoff-ysh) < tolerance
def do_n_fits(nfits, xsh, ysh, imsize, gaussfit=False, maxoff=None,
**kwargs):
"""
Test code
Parameters
----------
nfits : int
Number of times to perform fits
xsh : float
X shift from input to output image
ysh : float
Y shift from input to output image
imsize : int
Size of image (square)
"""
offsets = [
cross_correlation_shifts(
*make_offset_images(xsh, ysh, imsize, **kwargs)[:2],
gaussfit=gaussfit, maxoff=maxoff)
for ii in xrange(nfits)]
return offsets
except ImportError:
pass
|
[
{
"type": "go_to",
"params": {
"line": 108,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " At high signal-to-n",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 108,
"column": 25
},
"end": {
"line": 108,
"column": 27
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "-n",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "oise, the analytic version ov",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "erestimates the error\n by a factor of about 1.8, while the gaussian versio",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "n overestimates\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " error by about 1.15. At low s/n, the",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 110,
"column": 45,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "y both UNDERestimate the error.\n The tra",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 111,
"column": 10,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 111,
"column": 15,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "nsition zone occurs at a *",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "total* S/N ~ 1000 (i.e., the total\n si",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 112,
"column": 8,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 112,
"column": 10,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "gnal in the map divid",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 112,
"column": 31,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ed by the standard deviation",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " of the map - \n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 1,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " it dep",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 12,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 14,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ends on how many pixels have signal",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 45,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 49,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 113,
"column": 49,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ")\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 205,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 205,
"column": 0
},
"end": {
"line": 205,
"column": 48
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 264,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " amps = [5.,10.,50.,100.,500.,1000.]\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 316,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "\n @pytest.mark.para",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "metrize(('xsh','ysh','imsize','amp','gaussfit'),lis",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "t(itertools.product(shifts,shifts,size",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 317,
"column": 105,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 317,
"column": 110,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 317,
"column": 110,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "s,amps,gaussf",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "its)))\n def run_tests(",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "xsh, ysh, imsize, amp, gaussfit, nfits=10",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 318,
"column": 49,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 318,
"column": 59,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "00, maxoff=20):\n fitted_shifts = np.array(do_n_fits(",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 319,
"column": 39,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 319,
"column": 43,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 319,
"column": 43,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "nfits, xsh, ysh, ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "imsize, amp=amp, maxoff=maxoff))\n errors = ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "fitted_shifts.std(axi",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 320,
"column": 35,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 320,
"column": 38,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 320,
"column": 38,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "s=0)\n x,y,ex,ey",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " = cross_correlation_shifts(\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " *make_offset_image",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "s(xsh, ysh, imsize, amp=amp)",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 322,
"column": 62,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "[:2],\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " gaussfit=gau",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ssfit, maxoff=maxoff, return_error=",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "True,\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 324,
"column": 9,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 324,
"column": 15,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " erri",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 324,
"column": 20,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "m1=np.ones([imsize",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ",imsize]),\n err",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "im2=np.ones([im",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 325,
"column": 34,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "size,imsize]))\n print \"Std",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "Dev: %10.3g,%10.3g Measured: %10.3g,%10.3g Diff",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "erence: %10.",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "3g, %10.3g Diff/Real: %10",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 326,
"column": 102,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 326,
"column": 105,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 326,
"column": 104
},
"end": {
"line": 326,
"column": 105
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "0",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 326,
"column": 105,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ".3g,%10.3g\" ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 326,
"column": 113,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 326,
"column": 117,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "% (\n errors[0],errors",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 327,
"column": 25,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 327,
"column": 28,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "[1], ex,ey,errors[",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 327,
"column": 46,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "0]-ex,err",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ors[1]-ey,\n (err",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ors[0]-ex)/error",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "s[0], (errors[1]-ey)/err",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 328,
"column": 56,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ors[1])\n\n return errors[0],errors",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "[1],ex,ey\n\n de",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "f determine_error_offsets()",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 332,
"column": 33,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ":\n \"\"\"\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 334,
"column": 5,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 334,
"column": 7,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " Experiment to determine how wrong the error estimates are\n (WHY ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "are they wrong? Still don't unders",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "tand)\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " \"\"\"\n # an",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 337,
"column": 12,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "alytic\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 338,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 338,
"column": 5,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " A = np.array([run",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 338,
"column": 20,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 338,
"column": 25,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "_tests(1.5,1.5,50,a,False,",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "nfits=200) for ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "a in np.logspace(1.5,3,30)]);\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 5,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 5,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " G = np.array([run_tests(1.5,1.5,",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "50,a,True,nfits=20",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 56,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 58,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "0) for",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " a in np.logspace(1.5,3,30)",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 339,
"column": 91,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "]);\n print \"",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "Analytic offs",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "et: %g\" % (( (A[",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ":,3]/A[:,1]).mean()",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " + (A[:,2]/A[:,0",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "]).mean()",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 340,
"column": 83,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 340,
"column": 88,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 340,
"column": 88,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " )/2. )\n print \"Gau",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ssian offset: %g",
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 341,
"column": 31
},
"end": {
"line": 341,
"column": 34
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " %g",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "\" % (( (G[:,3]/G[:",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ",1]).mean() + (G[:,2]/G[:,0]).mean() )/2. )\n",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 342,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "keflavich/agpy,astocko/agpy,astocko/agpy,keflavich/agpy,keflavich/agpy,keflavich/agpy,astocko/agpy,astocko/agpy",
"status": "M",
"commit": "de534b0c95ddc373d79d0f698e3c545c1ee57c1e",
"parent": null,
"path": "AG_image_tools/cross_correlation_shifts.py",
"old_path": "AG_image_tools/cross_correlation_shifts.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "added new tests to cross_correlation_shifts, along with some docs\n\n\ngit-svn-id: 4511b3d811903a91403bf66535cdd426db6c410e@520 7a03b35a-38c0-11de-8fef-1f5ae417a2df\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
import os
import csv
import sys
from optparse import make_option
from collections import Callable
from django.db.models import Q
from django.contrib.sites.models import Site
from django.db.models.loading import get_model
from django.core.management.base import LabelCommand, CommandError
DOMAIN = Site.objects.get_current().domain
class Command(LabelCommand):
option_list = LabelCommand.option_list + (
make_option('--fields', dest='fields', default=None),
make_option('--filters', dest='filters', default=None),
make_option('--ordering', dest='ordering', default=None),
make_option('--range', dest='range', default=None)
)
help = 'Export any data in csv'
label = 'app.model'
def get_model(self, label):
app, model = label.split('.', 1)
Model = get_model(app, model)
if not Model:
raise CommandError('Model "{0}" not found!'.format(label))
return Model
def get_result_filename(self, label):
directory = os.path.join(os.path.expanduser('~'), 'exportdata')
# TODO: add option for configuration directory
if not os.path.exists(directory):
os.makedirs(directory)
return os.path.join(directory, '{0}.csv'.format(label))
def set_filters(self, qs, filters):
if filters:
filters = filters.split(',')
for filter_name in filters:
if '=' in filter_name:
field, value = filter_name.split('=', 1)
qs = qs.filter(**{field: value})
elif hasattr(qs, filter_name):
qs = getattr(qs, filter_name)()
else:
msg = 'Model has no method "{0}" ' \
'or this filter not "key=value" ' \
'formatted'.format(filter_name)
raise CommandError(msg)
return qs
def set_ordering(self, qs, ordering):
if ordering:
ordering = ordering.split(',')
qs = qs.order_by(*ordering)
return qs
def set_range(self, qs, pk_range):
if pk_range:
if '-' in pk_range:
from_value, to_value = pk_range.split('-', 1)
qs = qs.filter(pk__gte=from_value)
qs = qs.filter(pk__lte=to_value)
if ',' in pk_range:
values = pk_range.split(',')
lookup = Q(pk=values[0])
for value in values[1:]:
lookup |= Q(pk=value)
qs = qs.filter(lookup)
return qs
def get_fields(self, fields, Model):
if not fields:
return map(lambda x: x.name, Model._meta.fields)
return fields.split(',')
def get_field_data(self, field_name, obj):
if '__' in field_name:
parent_field, child_field = field_name.split('__', 1)
if not hasattr(obj, parent_field):
msg = 'Model object has no attribute "{0}"'.format(
parent_field)
raise CommandError(msg)
field = getattr(obj, parent_field, None)
if not hasattr(field, child_field):
msg = '"{0}" object has no attribute "{1}"'.format(
parent_field, child_field)
raise CommandError(msg)
field = getattr(field, child_field)
else:
field = getattr(obj, field_name)
if isinstance(field, Callable):
field = field()
# TODO: move get_absolute_url to options (site_url_fields)
if field_name == 'get_absolute_url':
# hack, because in python not possible
# check function has a decorator
field = u'http://{0}{1}'.format(DOMAIN, field)
if isinstance(field, (str, unicode,)):
field = field.encode('utf-8')
return field
def handle_label(self, label, **options):
fields = options.get('fields')
filters = options.get('filters')
ordering = options.get('ordering')
pk_range = options.get('range')
Model = self.get_model(label)
filename = self.get_result_filename(label)
resultcsv = csv.writer(open(filename, 'wb'), delimiter=';',
quoting=csv.QUOTE_MINIMAL)
qs = Model.objects.all()
qs = self.set_filters(qs, filters)
qs = self.set_ordering(qs, ordering)
qs = self.set_range(qs, pk_range)
fields = self.get_fields(fields, Model)
resultcsv.writerow(fields)
for obj in qs:
result = []
for field_name in fields:
data = self.get_field_data(field_name, obj)
result.append(data)
resultcsv.writerow(result)
sys.exit('Done! Exported objects: {0}'.format(qs.count()))
|
[
{
"type": "go_to",
"params": {
"line": 21,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 21,
"column": 0
},
"end": {
"line": 21,
"column": 59
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " make_option('-",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 21,
"column": 16,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 21,
"column": 22,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "-range', dest='range', ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "default=None),\n make_option('--filepath', dest='f",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 22,
"column": 33,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 22,
"column": 41,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 22,
"column": 41,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "ilepath', default=None),\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 33,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 33,
"column": 0
},
"end": {
"line": 35,
"column": 55
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " def get_result_filename(self, filepath, label):\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "if filepath",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ":\n directory = filepa",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 35,
"column": 30,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "th.rsplit('/', 1)\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " filename = directory.pop()\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 37,
"column": 2,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 37,
"column": 7,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " if directory:\n",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " directory = directory.pop()\n else:\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 40,
"column": 3,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 40,
"column": 8,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 40,
"column": 8,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " directory = '.'\n else:\n ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " directory = os.path.join(os.path.expanduser('~'), 'exportdata')\n ",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 43,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 43,
"column": 2,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " filename = '",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "{0}.csv'.format(lab",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 43,
"column": 43,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "el)\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 38,
"column": 39,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 38,
"column": 39
},
"end": {
"line": 38,
"column": 62
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "filename",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 117,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " fi",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "lepath = options.get('filepath')\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 119,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 119,
"column": 0
},
"end": {
"line": 120,
"column": 68
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " full_path = self.ge",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "t_result_filename(fil",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "epath, label)\n resultcsv = csv.writer(open(",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "full_path, 'wb'), delimiter=';',\n",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 121,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "saippuakauppias/django-exportdata",
"status": "M",
"commit": "a831c0a341e861dd7e8be7484d6853dc596bd35a",
"parent": null,
"path": "exportdata/management/commands/exportdata.py",
"old_path": "exportdata/management/commands/exportdata.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "add support for set --filepath option: close #3\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
import logging
import json
import requests
import datetime
import math
import time
# django modules
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, get_object_or_404
from django.views.generic import View, TemplateView, ListView, DetailView
from django.db.models import Q,Count
from django.conf import settings
from django.http import HttpResponse, JsonResponse, HttpResponseRedirect
from django.utils import timezone
from django.contrib.auth import _get_backends, login
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
# local modules
from app import models
from app.utils.smsUtil import tpl_send_sms, TPL_STU_PAY_FAIL
from app.utils.types import parseInt
from app.exception import TimeSlotConflict, OrderStatusIncorrect, RefundError
from .wxapi import *
logger = logging.getLogger('app')
# Create your views here.
def _get_default_bankend_path():
for backend, backend_path in _get_backends(return_tuples=True):
return backend_path
def _get_parent(request):
parent = None
if not request.user.is_anonymous():
try:
parent = request.user.parent
except:
pass
if not parent:
# 通过 wx_openid 获得家长
openid = request.GET.get("openid", None)
if not openid:
openid = request.POST.get("openid", None)
if openid:
profile = models.Profile.objects.filter(wx_openid=openid).order_by('-id').first()
try:
parent = profile and profile.user.parent or None
except:
pass
if parent:
parent.user.backend = _get_default_bankend_path()
login(request, parent.user)
return parent
class TeachersView(ListView):
model = models.Teacher
context_object_name = 'teacher_list'
template_name = 'wechat/teacher/teachers.html'
def get_queryset(self):
teacher_list = self.model.objects.filter(
recommended_on_wechat=True
).filter(
published=True
)
return teacher_list
class TeacherDetailView(DetailView):
model = models.Teacher
class SchoolsView(ListView):
model = models.School
context_object_name = 'school_list'
template_name = 'wechat/school/schools.html'
def get_queryset(self):
school_list=self.model.objects.annotate(num_photos=Count('schoolphoto'))
queryset = {}
queryset['expr_center_list'] = school_list.filter(
center = True
)
queryset['community_center_list'] = school_list.filter(
center=False
)
return queryset
def get_context_data(self, **kwargs):
context = super(SchoolsView, self).get_context_data(**kwargs)
openid = self.request.GET.get("openid", None) or self.request.POST.get("openid", None)
server_timestamp = int(time.time())
nonce_str = make_nonce_str()
access_token, msg = _get_wx_token()
jsapi_ticket, msg = _get_wx_jsapi_ticket(access_token)
cur_url = self.request.build_absolute_uri()
schools = self.model.objects.all()
photosdic = {}
for school in schools:
photosdic[school.id] = school.get_photo_url_list()
# photosdic = json.dumps(photosdic)
signature = wx_signature({'noncestr': nonce_str,
'jsapi_ticket': jsapi_ticket,
'timestamp': server_timestamp,
'url': cur_url})
context['WX_OPENID'] = openid
context['WX_APPID'] = settings.WEIXIN_APPID
context['WX_NONCE_STR'] = nonce_str
context['WX_SIGNITURE'] = signature
context['server_timestamp'] = server_timestamp
context['photosdic'] = photosdic
return context
class SchoolDetailView(ListView):
models = models.School
@method_decorator(csrf_exempt, name='dispatch')
class CourseChoosingView(View):
template_name = 'wechat/order/course_choosing.html'
def get(self, request):
teacher_id = request.GET.get('teacher_id', -1)
kwargs = {}
teacher = get_object_or_404(models.Teacher, pk=teacher_id)
kwargs['teacher'] = teacher
current_user = self.request.user
kwargs['current_user'] = current_user
if settings.TESTING:
# the below line is only for testing
parent = models.Parent.objects.get(pk=3)
parent.user.backend = _get_default_bankend_path()
login(request, parent.user)
else:
parent = _get_parent(request)
if parent is None:
return HttpResponseRedirect(WX_AUTH_URL)
kwargs['parent'] = parent
subject = teacher.subject() # 目前老师只有一个科目
order_count = models.Order.objects.filter(
parent=parent, subject=subject,
status=models.Order.PAID).count()
first_buy = order_count <= 0 # 对于当前科目来说, 是第一次购买
kwargs['first_buy'] = first_buy
kwargs['evaluate_time'] = int(models.TimeSlot.GRACE_TIME.total_seconds()) # 第一次购买某个科目时, 建档需要的时间, 精确到秒
# abilities = teacher.abilities.all()
# kwargs['abilities'] = abilities
prices = teacher.prices()
kwargs['prices'] = prices
# schools = teacher.schools.all()
schools = list(models.School.objects.all())
kwargs['schools'] = schools
kwargs['daily_time_slots'] = models.WeeklyTimeSlot.DAILY_TIME_SLOTS
now = timezone.now()
now_timestamp = int(now.timestamp())
kwargs['server_timestamp'] = now_timestamp
date_from = now.replace(hour=0, minute=0, second=0, microsecond=0)
date_to = now.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(days=1)
coupons = models.Coupon.objects.filter(parent=parent, validated_start__lte=date_from, expired_at__gt=date_to, used=False
).order_by('-amount', 'expired_at')
kwargs['coupons'] = coupons
pre_chosen_coupon = None
for coupon in coupons:
if coupon.mini_course_count==0:
pre_chosen_coupon = coupon
break
# pre_chosen_coupon = pre_chosen_coupon or coupons.first()
kwargs['pre_chosen_coupon'] = pre_chosen_coupon
url = request.build_absolute_uri()
sign_data = _jssdk_sign(url)
kwargs.update(sign_data)
kwargs['WX_APPID'] = settings.WEIXIN_APPID
return render(request, self.template_name, kwargs)
def post(self, request):
action = request.POST.get('action')
if action == 'confirm':
return self.confirm_order(request)
if action == 'verify':
return self.verify_order(request)
if action == 'schools_dist':
return self.schools_distance(request)
return HttpResponse("Not supported request.", status=403)
def confirm_order(self, request):
if settings.TESTING:
# the below line is only for testing
parent = models.Parent.objects.get(pk=3)
else:
parent = _get_parent(request)
if not parent:
return JsonResponse({'ok': False, 'msg': '您还未登录', 'code': 403})
if settings.TESTING:
# the below line is real wx_openid, but not related with ours server
wx_openid = 'oUpF8uMuAJO_M2pxb1Q9zNjWeS6o'
pass
else:
wx_openid = parent.user.profile.wx_openid
pass
if not wx_openid:
return JsonResponse({'ok': False, 'msg': '您还未关注公共号', 'code': 403})
# get request params
teacher_id = request.POST.get('teacher')
school_id = request.POST.get('school')
grade_id = request.POST.get('grade')
subject_id = request.POST.get('subject')
coupon_id = request.POST.get('coupon')
hours = parseInt(request.POST.get('hours'))
weekly_time_slot_ids = request.POST.get('weekly_time_slots').split('+')
if not hours or not weekly_time_slot_ids:
return JsonResponse({'ok': False, 'msg': '时间选择参数错误', 'code': 1})
# check params and get ref obj
teacher = get_object_or_404(models.Teacher, pk=teacher_id)
school = get_object_or_404(models.School, pk=school_id)
grade = get_object_or_404(models.Grade, pk=grade_id)
subject = teacher.subject() # 老师只有一个科目
coupon = coupon_id and coupon_id != '0' and get_object_or_404(models.Coupon, pk=coupon_id) or None
weekly_time_slots = [get_object_or_404(models.WeeklyTimeSlot, pk=w_id) for w_id in weekly_time_slot_ids]
if coupon and coupon.used:
return JsonResponse({'ok': False, 'msg': '您所选择代金券已使用, 请重新选择', 'code': 2})
# create order
order = models.Order.objects.create(
parent=parent, teacher=teacher, school=school,
grade=grade, subject=subject, hours=hours, coupon=coupon)
order.weekly_time_slots.add(*weekly_time_slots)
order.save()
# get wx pay order
ret_json = wx_pay_unified_order(order, request, wx_openid)
if not ret_json['ok']:
return JsonResponse({'ok': False, 'msg': ret_json['msg'], 'code': -500})
# 构造js-sdk 支付接口参数 appId, timeStamp, nonceStr, package, signType
data = {}
data['timeStamp'] = int(timezone.now().timestamp())
data['nonceStr'] = make_nonce_str()
data['package'] = 'prepay_id={id}'.format(id=ret_json['data']['prepay_id'])
data['signType'] = 'MD5'
data['appId'] = settings.WEIXIN_APPID
data['paySign'] = wx_sign_for_pay(data)
data['prepay_id'] = ret_json['data']['prepay_id']
data['order_id'] = order.order_id
logger.debug(data)
return JsonResponse({'ok': True, 'msg': '', 'code': '', 'data': data})
def verify_order(self, request):
# get request params
prepay_id = request.POST.get('prepay_id')
order_id = request.POST.get('order_id')
query_ret = wx_pay_order_query(order_id=order_id)
if query_ret['ok']:
trade_state = query_ret['data']['trade_state']
if trade_state == WX_SUCCESS:
# 支付成功, 设置订单支付成功, 并且生成课程安排
try:
set_order_paid(prepay_id=prepay_id, order_id=query_ret['data']['out_trade_no'], open_id=query_ret['data']['openid'])
except:
pass # 忽略错误, 微信端直接关闭页面
return JsonResponse({'ok': True, 'msg': '', 'code': 0})
else:
if trade_state == WX_PAYERROR:
return {'ok': False, 'msg': '支付失败', 'code': 2}
else:
return {'ok': False, 'msg': '未支付', 'code': 3}
else:
return {'ok': False, 'msg': query_ret['msg'], 'code': 1}
def schools_distance(self, request):
lat = request.POST.get('lat', None)
lng = request.POST.get('lng', None)
if lat is None or lat == '' or lng is None or lng == '':
return JsonResponse({'ok': False})
lat = float(lat)
lng = float(lng)
# schools = teacher.schools.all()
schools = models.School.objects.all()
distances = []
p = {'lat': lat, 'lng': lng}
for school in schools:
if school.latitude is None or school.longitude is None:
distances.append({'id': school.id, 'far': ''})
continue
sp = {'lat': school.latitude, 'lng': school.longitude}
dis = calculateDistance(p, sp)
distances.append({'id': school.id, 'far': dis})
return JsonResponse({'ok': True, 'list': distances})
def _jssdk_sign(url):
now = timezone.now()
now_timestamp = int(now.timestamp())
nonce_str = make_nonce_str()
access_token, msg = _get_wx_token()
jsapi_ticket, msg = _get_wx_jsapi_ticket(access_token)
data = {'noncestr': nonce_str,
'jsapi_ticket': jsapi_ticket,
'timestamp': now_timestamp,
'url': url}
signature = wx_signature(data)
return {'noncestr': nonce_str,
'timestamp': now_timestamp,
'signature': signature}
def set_order_paid(prepay_id=None, order_id=None, open_id=None):
"""
支付成功, 设置订单支付成功, 并且生成课程安排
有两个地方调用:
1, 刚支付完, verify order 主动去微信查询订单状态, 当支付成功时调用
2, 接受微信支付结果异步通知中, 当支付成功时调用
"""
charge = None
if prepay_id:
charge = models.Charge.objects.get(ch_id=prepay_id)
elif order_id:
charge = models.Charge.objects.get(order__order_id=order_id)
if charge.paid:
return # 已经处理过了, 直接返回
charge.paid = True
charge.time_paid = timezone.now()
# charge.transaction_no = ''
charge.save()
order = charge.order
if not order_id:
order_id = order.order_id
if order.status == models.Order.PAID:
return # 已经处理过了, 直接返回
order.status = models.Order.PAID
order.paid_at = timezone.now()
order.save()
try:
models.Order.objects.allocate_timeslots(order)
# return JsonResponse({'ok': 1})
except TimeSlotConflict:
logger.warning('timeslot conflict, do refund, order_id: '+order_id)
# 微信通知用户失败信息
send_pay_fail_to_user(open_id, order_id)
# 短信通知家长
try:
phone = order.parent.user.profile.phone
tpl_send_sms(phone, TPL_STU_PAY_FAIL)
except Exception as ex:
logger.error(ex)
# 退款事宜操作
try:
models.Order.objects.refund(
order, '课程被抢占,自动退款', order.parent.user)
except OrderStatusIncorrect as e:
logger.error(e)
raise e
except RefundError as e:
logger.error(e)
raise e
return # 没有其他错误, 直接返回
# 微信通知用户购课成功信息
send_pay_info_to_user(open_id, order_id)
def _get_wx_jsapi_ticket(access_token):
jsapi_ticket = _get_wx_jsapi_ticket_from_db()
msg = None
if not jsapi_ticket:
result = wx_get_jsapi_ticket(access_token)
if result['ok']:
jsapi_ticket = result['ticket']
else:
msg = result['msg']
return jsapi_ticket, msg
def _get_wx_jsapi_ticket_from_db():
tk = models.WeiXinToken.objects.filter(token_type=models.WeiXinToken.JSAPI_TICKET).order_by('-id').first()
if tk and tk.token and (not tk.is_token_expired()):
return tk.token
return None
def _get_wx_token():
token = _get_wx_token_from_db()
msg = None
if not token:
result = wx_get_token()
if result['ok']:
token = result['token']
else:
msg = result['msg']
return token, msg
def _get_wx_token_from_db():
tk = models.WeiXinToken.objects.filter(token_type=models.WeiXinToken.ACCESS_TOKEN).order_by('-id').first()
if tk and tk.token and (not tk.is_token_expired()):
return tk.token
return None
@csrf_exempt
def wx_pay_notify_view(request):
"""
接受微信支付结果异步通知view
"""
req_json = resolve_wx_pay_notify(request)
if not req_json['ok']:
return HttpResponse(wx_dict2xml({'return_code': WX_FAIL, 'return_msg': ''}))
data = req_json['data']
openid = data['openid']
wx_order_id = data['transaction_id']
order_id = data['out_trade_no']
try:
set_order_paid(order_id=order_id, open_id=openid)
except:
pass # 忽略错误, 微信端直接关闭页面
return HttpResponse(wx_dict2xml({'return_code': WX_SUCCESS, 'return_msg': ''}))
@csrf_exempt
def get_wx_token(request):
retToken, retMsg = _get_wx_token()
if retMsg:
return JsonResponse({'ok': False, 'msg': retMsg, 'code': -1})
else:
return JsonResponse({'ok': True, 'token': retToken, 'code': 0})
@csrf_exempt
def send_template_msg(request):
tk = get_wx_token(request)
if tk.status_code == 200:
content = json.loads(tk.content.decode())
token = content['token']
tmpmsg_url = WX_TPL_MSG_URL.format(token=token)
ct = getContentData(request)
ct['access_token'] = token
json_template = json.dumps(ct)
req = requests.post(tmpmsg_url, data=json.dumps(ct))
retText = json.loads(req.text)
msgid = None
if 'msgid' in retText:
msgid = retText['msgid']
return JsonResponse({'ok': True, 'msgid': msgid, 'code': 0})
else:
return JsonResponse({'ok': False, 'msg': '获取token错误', 'code': -1})
def getContentData(request):
temptype = request.GET.get("temptype", None)
if not temptype:
temptype = request.POST.get("temptype", None)
if temptype == 'payok':
return template_msg_data_pay_ok(request)
elif temptype == 'payinfo':
return template_msg_data_pay_info(request)
return {}
# 报名缴费成功
def template_msg_data_pay_ok(request):
tempId = settings.WECHAT_PAY_OK_TEMPLATE
toUser = request.GET.get("toUser", None)
if not toUser:
toUser = request.POST.get("toUser", None)
first = request.GET.get("first", None)
if not first:
openid = request.POST.get("first", None)
kw1 = request.GET.get("kw1", None)
if not kw1:
openid = request.POST.get("kw1", None)
kw2 = request.GET.get("kw2", None)
if not kw2:
kw2 = request.POST.get("kw2", None)
kw3 = request.GET.get("kw3", None)
if not kw3:
kw3 = request.POST.get("kw3", None)
kw4 = request.GET.get("kw4", None)
if not kw4:
kw4 = request.POST.get("kw4", None)
kw5 = request.GET.get("kw5", None)
if not kw5:
kw5 = request.POST.get("kw5", None)
remark = request.GET.get("remark", None)
if not remark:
remark = request.POST.get("remark", None)
return {
"access_token": None,
"touser": toUser,
"template_id": tempId,
"data": {
"first": {
"value": first
},
"keyword1": {
"value": kw1
},
"keyword2": {
"value": kw2
},
"keyword3": {
"value": kw3
},
"keyword4": {
"value": kw4
},
"keyword5": {
"value": kw5
},
"remark": {
"value": remark
}
}
}
# 支付提醒:支付提醒,支付失败
def template_msg_data_pay_info(request):
tempId = settings.WECHAT_PAY_INFO_TEMPLATE
toUser = request.GET.get("toUser", None)
if not toUser:
toUser = request.POST.get("toUser", None)
first = request.GET.get("first", None)
if not first:
first = request.POST.get("first", None)
kw1 = request.GET.get("kw1", None)
if not kw1:
kw1 = request.POST.get("kw1", None)
kw2 = request.GET.get("kw2", None)
if not kw2:
kw2 = request.POST.get("kw2", None)
kw3 = request.GET.get("kw3", None)
if not kw3:
kw3 = request.POST.get("kw3", None)
kw4 = request.GET.get("kw4", None)
if not kw4:
kw4 = request.POST.get("kw4", None)
kw5 = request.GET.get("kw5", None)
if not kw5:
kw5 = request.POST.get("kw5", None)
remark = request.GET.get("remark", None)
if not remark:
remark = request.POST.get("remark", None)
return {
"access_token": None,
"touser": toUser,
"template_id": tempId,
"data": {
"first": {
"value": first
},
"keyword1": {
"value": kw1
},
"keyword2": {
"value": kw2
},
"keyword3": {
"value": kw3
},
"keyword4": {
"value": kw4
},
"keyword5": {
"value": kw5
},
"remark": {
"value": remark
}
}
}
def send_pay_info_to_user(openid, order_no):
"""
给微信用户发送购课成功信息
"""
order = models.Order.objects.get(order_id=order_no)
data = {
"first": {
"value": "感谢您购买麻辣老师课程!"
},
"keyword1": {
"value": order.grade.name + order.subject.name
},
"keyword2": {
"value": order.teacher.name
},
"keyword3": {
"value": '课时费'
},
"keyword4": {
"value": order.parent.student_name or order.parent.user.profile.mask_phone()
},
"keyword5": {
"value": "%.2f元"%(order.to_pay/100)
},
"remark": {
"value": '有任何疑问请拨打客服电话'+settings.SERVICE_SUPPORT_TEL
}
}
tpl_id = settings.WECHAT_PAY_INFO_TEMPLATE
access_token, msg = _get_wx_token()
ret_json = wx_send_tpl_msg(access_token, tpl_id, openid, data)
if 'msgid' in ret_json:
return ret_json['msgid']
return False
def send_pay_fail_to_user(openid, order_no):
"""
给微信用户发送购课失败信息
"""
order = models.Order.objects.get(order_id=order_no)
data = {
"first": {
"value": "您好,该老师该时段课程已被抢购,您可重新选择课时进行购买。"
},
"keyword1": {
"value": order.grade.name + order.subject.name
},
"keyword2": {
"value": order.order_id
},
"remark": {
"value": '我们将在24小时内为您退款。退款事宜请联系客服:'+settings.SERVICE_SUPPORT_TEL
}
}
tpl_id = settings.WECHAT_PAY_FAIL_TEMPLATE
access_token, msg = _get_wx_token()
ret_json = wx_send_tpl_msg(access_token, tpl_id, openid, data)
if 'msgid' in ret_json:
return ret_json['msgid']
return False
@csrf_exempt
def teacher_view(request):
template_name = 'wechat/teacher/teacher.html'
openid = request.GET.get("openid", None)
if not openid:
openid = request.POST.get("openid", None)
teacherid = request.GET.get("teacherid", None)
if not teacherid:
teacherid = request.POST.get("teacherid", None)
teacher = None
gender = None
try:
teacher = models.Teacher.objects.get(id=teacherid)
profile = models.Profile.objects.get(user=teacher.user)
gender_dict = {"f": "女", "m": "男", "u": ""}
gender = gender_dict.get(profile.gender, "")
except models.Teacher.DoesNotExist:
return JsonResponse({'error': 'teacher not exist', 'code': -1})
except models.Profile.DoesNotExist:
return JsonResponse({'error': 'teacher profile not exist', 'code': -1})
memberService = models.Memberservice.objects.all()
achievements = models.Achievement.objects.filter(teacher=teacher).order_by('id')
grades_all = models.Grade.objects.all()
_heap = {}
grades_tree = []
for grade in grades_all:
if not grade.superset_id:
_temp = {'id':grade.id, 'name':grade.name, 'children':[]}
_heap[grade.id] = _temp
grades_tree.append(_temp)
else:
_temp = _heap[grade.superset_id]
_temp['children'].append({'id':grade.id, 'name':grade.name})
now_timestamp = int(time.time())
nonce_str = make_nonce_str()
access_token, msg = _get_wx_token()
jsapi_ticket, msg = _get_wx_jsapi_ticket(access_token)
cur_url = request.build_absolute_uri()
signature = wx_signature({'noncestr': nonce_str,
'jsapi_ticket': jsapi_ticket,
'timestamp': now_timestamp,
'url': cur_url})
context = {
"server_timestamp": now_timestamp,
"WX_APPID": settings.WEIXIN_APPID,
"WX_NONCE_STR": nonce_str,
"WX_SIGNATURE": signature,
"openid": openid,
"gender": gender,
"tags": list(teacher.tags.all()),
"achievements": achievements,
"memberService": list(memberService),
"subjects": models.Subject.objects.all,
"grades_tree": grades_tree,
"teacher_grade_ids": [grade.id for grade in teacher.grades()],
"teacher": teacher
}
return render(request, template_name, context)
@csrf_exempt
def getSchoolsWithDistance(request):
lat = request.POST.get("lat", None)
lng = request.POST.get("lng", None)
point = None
if lat is not None and lng is not None:
point = {
'lat': float(lat),
'lng': float(lng)
}
if not point:
JsonResponse({'ok': False, 'msg': 'no lat,lng', 'code': -1})
schools = models.School.objects.all()
ret = []
for school in schools:
pointB = None
sc = {
'name': school.name,
'img': school.get_thumbnail,
'address': school.address,
'region': school.region.name
}
if school.latitude is not None and school.longitude is not None:
pointB = {
'lat': school.latitude,
'lng': school.longitude
}
dis = calculateDistance(point, pointB)
sc['dis'] = dis
ret.append(sc)
ret = sorted(ret, key = lambda school: school['dis'] if 'dis' in school else 63710000)
for sc in ret:
if 'dis' in sc and sc['dis'] is not None:
sc['dis'] = sc['dis']/1000
return JsonResponse({'ok': True, 'schools': ret, 'code': 0})
def calculateDistance(pointA, pointB):
R = 6371000; #metres
toRadians = math.pi/180;
return math.acos(math.sin(toRadians * pointA["lat"]) * math.sin(toRadians * pointB["lat"]) + math.cos(toRadians * pointA["lat"]) * math.cos(
toRadians * pointB["lat"]) * math.cos(toRadians * pointB["lng"] - toRadians * pointA["lng"])) * R;
@csrf_exempt
def phone_page(request):
template_name = 'wechat/parent/reg_phone.html'
openid = request.GET.get("openid", None)
if not openid:
openid = request.POST.get("openid", None)
context = {
"openid": openid
}
return render(request, template_name, context)
@csrf_exempt
def add_openid(request):
phone = request.POST.get("phone", None)
code = request.POST.get("code", None)
openid = request.POST.get("openid", None)
if not openid:
return JsonResponse({
"result": False,
"code": -1
})
Profile = models.Profile
CheckCode = models.Checkcode
Parent = models.Parent
try:
profile = Profile.objects.get(phone=phone)
profile.wx_openid = openid
profile.save()
user = profile.user
user.backend = _get_default_bankend_path()
parent = Parent.objects.get(user=user)
except Profile.DoesNotExist:
# new user
user = Parent.new_parent()
parent = user.parent
profile = parent.user.profile
profile.phone = phone
profile.wx_openid = openid
profile.save()
except Parent.DoesNotExist:
parent = Parent(user=user)
parent.save()
if CheckCode.verify(phone, code)[0]:
return JsonResponse({
"result": True
})
else:
# 验证失败
return JsonResponse({
"result": False,
"code": -2
})
@csrf_exempt
def check_phone(request):
get_openid_url = 'https://api.weixin.qq.com/sns/oauth2/access_token?grant_type=authorization_code'
wx_code = request.GET.get('code', None)
teacherId = request.GET.get('state', None)
get_openid_url += '&appid=' + settings.WEIXIN_APPID
get_openid_url += '&secret=' + settings.WEIXIN_APP_SECRET
get_openid_url += '&code=' + wx_code
req = requests.get(get_openid_url)
ret = None
openid = None
logger.debug("..............................获取的wx_code:1..............")
logger.debug(wx_code)
logger.debug("..............................获取的wx_code:2..............")
if req.status_code == 200:
ret = json.loads(req.text)
if "openid" in ret:
openid = ret["openid"]
if "errcode" in ret:
logger.debug("..............................获取openid错误..1..............")
logger.debug(ret)
logger.debug("..............................获取openid错误..2..............")
else:
logger.debug("..............................获取openid, status_code错误................")
logger.debug(req.status_code)
if openid:
profiles = models.Profile.objects.filter(wx_openid=openid).order_by('-id')
lastOne = list(profiles) and profiles[0]
if lastOne:
logger.debug("..............................openid验证通过................")
return HttpResponseRedirect(reverse('wechat:order-course-choosing')+'?teacher_id='+str(teacherId)+'&openid='+openid)
context = {
"openid": openid,
"teacherId": teacherId,
"nextpage": reverse('wechat:order-course-choosing')+'?teacher_id='+str(teacherId)
}
logger.debug("..............................openid验证不通过,继续验证................")
return render(request, 'wechat/parent/reg_phone.html', context)
|
[
{
"type": "go_to",
"params": {
"line": 799,
"column": 17,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 799,
"column": 17
},
"end": {
"line": 799,
"column": 17
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " or openid == 'None'",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "malaonline/iOS,malaonline/Android,malaonline/Android,malaonline/iOS,malaonline/Server,malaonline/Server,malaonline/Server,malaonline/iOS,malaonline/Server,malaonline/Android",
"status": "M",
"commit": "614d4e36efc76291e5f83560693412677331d2f8",
"parent": null,
"path": "server/wechat/views.py",
"old_path": "server/wechat/views.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "WEC-27: add log\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
"""
Internal tasks are tasks that are started from the teuthology infrastructure.
Note that there is no corresponding task defined for this module. All of
the calls are made from other modules, most notably teuthology/run.py
"""
from cStringIO import StringIO
import contextlib
import logging
import os
import time
import yaml
import subprocess
from teuthology import lockstatus
from teuthology import lock
from teuthology import misc
from teuthology import provision
from teuthology.job_status import get_status, set_status
from teuthology.config import config as teuth_config
from teuthology.parallel import parallel
from teuthology.suite import has_packages_for_distro
from ..orchestra import cluster, remote, run
from .. import report
log = logging.getLogger(__name__)
@contextlib.contextmanager
def base(ctx, config):
"""
Create the test directory that we will be using on the remote system
"""
log.info('Creating test directory...')
testdir = misc.get_testdir(ctx)
run.wait(
ctx.cluster.run(
args=[
'mkdir', '-p', '-m0755', '--',
testdir,
],
wait=False,
)
)
try:
yield
finally:
log.info('Tidying up after the test...')
# if this fails, one of the earlier cleanups is flawed; don't
# just cram an rm -rf here
run.wait(
ctx.cluster.run(
args=[
'rmdir',
'--',
testdir,
],
wait=False,
),
)
@contextlib.contextmanager
def lock_machines(ctx, config):
"""
Lock machines. Called when the teuthology run finds and locks
new machines. This is not called if the one has teuthology-locked
machines and placed those keys in the Targets section of a yaml file.
"""
# It's OK for os_type and os_version to be None here. If we're trying
# to lock a bare metal machine, we'll take whatever is available. If
# we want a vps, defaults will be provided by misc.get_distro and
# misc.get_distro_version in provision.create_if_vm
os_type = ctx.config.get("os_type")
os_version = ctx.config.get("os_version")
arch = ctx.config.get('arch')
log.info('Locking machines...')
assert isinstance(config[0], int), 'config[0] must be an integer'
machine_type = config[1]
how_many = config[0]
# We want to make sure there are always this many machines available
to_reserve = teuth_config.reserve_machines
assert isinstance(to_reserve, int), 'reserve_machines must be integer'
assert (to_reserve >= 0), 'reserve_machines should >= 0'
# change the status during the locking process
report.try_push_job_info(ctx.config, dict(status='waiting'))
while True:
# get a candidate list of machines
machines = lock.list_locks(machine_type=machine_type, up=True,
locked=False, count=how_many + to_reserve)
if machines is None:
if ctx.block:
log.error('Error listing machines, trying again')
time.sleep(20)
continue
else:
raise RuntimeError('Error listing machines')
# make sure there are machines for non-automated jobs to run
if len(machines) < to_reserve + how_many and ctx.owner.startswith('scheduled'):
if ctx.block:
log.info(
'waiting for more machines to be free (need %s + %s, have %s)...',
to_reserve,
how_many,
len(machines),
)
time.sleep(10)
continue
else:
assert 0, ('not enough machines free; need %s + %s, have %s' %
(to_reserve, how_many, len(machines)))
newly_locked = lock.lock_many(ctx, how_many, machine_type, ctx.owner,
ctx.archive, os_type, os_version, arch)
if not newly_locked and not isinstance(newly_locked, list):
raise RuntimeError('Invalid parameters specified')
if len(newly_locked) == how_many:
vmlist = []
for lmach in newly_locked:
if misc.is_vm(lmach):
vmlist.append(lmach)
if vmlist:
log.info('Waiting for virtual machines to come up')
keys_dict = dict()
loopcount = 0
while len(keys_dict) != len(vmlist):
loopcount += 1
time.sleep(10)
keys_dict = lock.ssh_keyscan(vmlist)
log.info('virtual machine is still unavailable')
if loopcount == 40:
loopcount = 0
log.info('virtual machine(s) still not up, ' +
'recreating unresponsive ones.')
for guest in vmlist:
if guest not in keys_dict.keys():
log.info('recreating: ' + guest)
full_name = misc.canonicalize_hostname(guest)
provision.destroy_if_vm(ctx, full_name)
provision.create_if_vm(ctx, full_name)
if lock.do_update_keys(keys_dict):
log.info("Error in virtual machine keys")
newscandict = {}
for dkey in newly_locked.iterkeys():
stats = lockstatus.get_status(dkey)
newscandict[dkey] = stats['ssh_pub_key']
ctx.config['targets'] = newscandict
else:
ctx.config['targets'] = newly_locked
locked_targets = yaml.safe_dump(
ctx.config['targets'],
default_flow_style=False
).splitlines()
log.info('\n '.join(['Locked targets:', ] + locked_targets))
# successfully locked machines, change status back to running
report.try_push_job_info(ctx.config, dict(status='running'))
break
elif not ctx.block:
assert 0, 'not enough machines are available'
log.warn('Could not lock enough machines, waiting...')
time.sleep(10)
try:
yield
finally:
# If both unlock_on_failure and nuke-on-error are set, don't unlock now
# because we're just going to nuke (and unlock) later.
unlock_on_failure = (
ctx.config.get('unlock_on_failure', False)
and not ctx.config.get('nuke-on-error', False)
)
if get_status(ctx.summary) == 'pass' or unlock_on_failure:
log.info('Unlocking machines...')
for machine in ctx.config['targets'].iterkeys():
lock.unlock_one(ctx, machine, ctx.owner, ctx.archive)
def save_config(ctx, config):
"""
Store the config in a yaml file
"""
log.info('Saving configuration')
if ctx.archive is not None:
with file(os.path.join(ctx.archive, 'config.yaml'), 'w') as f:
yaml.safe_dump(ctx.config, f, default_flow_style=False)
def check_lock(ctx, config, check_up=True):
"""
Check lock status of remote machines.
"""
if not teuth_config.lock_server or ctx.config.get('check-locks') is False:
log.info('Lock checking disabled.')
return
log.info('Checking locks...')
for machine in ctx.config['targets'].iterkeys():
status = lockstatus.get_status(machine)
log.debug('machine status is %s', repr(status))
assert status is not None, \
'could not read lock status for {name}'.format(name=machine)
if check_up:
assert status['up'], 'machine {name} is marked down'.format(
name=machine
)
assert status['locked'], \
'machine {name} is not locked'.format(name=machine)
assert status['locked_by'] == ctx.owner, \
'machine {name} is locked by {user}, not {owner}'.format(
name=machine,
user=status['locked_by'],
owner=ctx.owner,
)
def check_packages(ctx, config):
"""
Checks gitbuilder to determine if there are missing packages for this job.
If there are missing packages, fail the job.
"""
log.info("Checking packages...")
os_type = ctx.config.get("os_type", None)
sha1 = ctx.config.get("sha1", None)
# We can only do this check if there are a defined sha1 and os_type
# in the job config.
if os_type and sha1:
log.info(
"Checking packages for os_type '{os}' and ceph hash '{ver}'".format(
os=os_type,
ver=sha1,
)
)
if not has_packages_for_distro(sha1, os_type):
msg = "Packages for os_type '{os}' and ceph hash '{ver}' not found"
msg = msg.format(
os=os_type,
ver=sha1,
)
log.error(msg)
# set the failure message and update paddles with the status
ctx.summary["failure_reason"] = msg
set_status(ctx.summary, "dead")
report.try_push_job_info(ctx.config, dict(status='dead'))
raise RuntimeError(msg)
else:
log.info(
"Checking packages skipped, missing os_type '{os}' or ceph hash '{ver}'".format(
os=os_type,
ver=sha1,
)
)
@contextlib.contextmanager
def timer(ctx, config):
"""
Start the timer used by teuthology
"""
log.info('Starting timer...')
start = time.time()
try:
yield
finally:
duration = time.time() - start
log.info('Duration was %f seconds', duration)
ctx.summary['duration'] = duration
def add_remotes(ctx, config):
"""
Create a ctx.cluster object populated with remotes mapped to roles
"""
remotes = []
machs = []
for name in ctx.config['targets'].iterkeys():
machs.append(name)
for t, key in ctx.config['targets'].iteritems():
t = misc.canonicalize_hostname(t)
try:
if ctx.config['sshkeys'] == 'ignore':
key = None
except (AttributeError, KeyError):
pass
rem = remote.Remote(name=t, host_key=key, keep_alive=True)
remotes.append(rem)
ctx.cluster = cluster.Cluster()
if 'roles' in ctx.config:
for rem, roles in zip(remotes, ctx.config['roles']):
assert all(isinstance(role, str) for role in roles), \
"Roles in config must be strings: %r" % roles
ctx.cluster.add(rem, roles)
log.info('roles: %s - %s' % (rem, roles))
else:
for rem in remotes:
ctx.cluster.add(rem, rem.name)
def connect(ctx, config):
"""
Connect to all remotes in ctx.cluster
"""
log.info('Opening connections...')
for rem in ctx.cluster.remotes.iterkeys():
log.debug('connecting to %s', rem.name)
rem.connect()
def push_inventory(ctx, config):
if not teuth_config.lock_server:
return
def push():
for rem in ctx.cluster.remotes.keys():
info = rem.inventory_info
lock.update_inventory(info)
try:
push()
except Exception:
log.exception("Error pushing inventory")
def serialize_remote_roles(ctx, config):
"""
Provides an explicit mapping for which remotes have been assigned what roles
So that other software can be loosely coupled to teuthology
"""
if ctx.archive is not None:
with file(os.path.join(ctx.archive, 'info.yaml'), 'r+') as info_file:
info_yaml = yaml.safe_load(info_file)
info_file.seek(0)
info_yaml['cluster'] = dict([(rem.name, {'roles': roles}) for rem, roles in ctx.cluster.remotes.iteritems()])
yaml.safe_dump(info_yaml, info_file, default_flow_style=False)
def check_ceph_data(ctx, config):
"""
Check for old /var/lib/ceph directories and detect staleness.
"""
log.info('Checking for old /var/lib/ceph...')
processes = ctx.cluster.run(
args=[
'test', '!', '-e', '/var/lib/ceph',
],
wait=False,
)
failed = False
for proc in processes:
try:
proc.wait()
except run.CommandFailedError:
log.error('Host %s has stale /var/lib/ceph, check lock and nuke/cleanup.', proc.remote.shortname)
failed = True
if failed:
raise RuntimeError('Stale /var/lib/ceph detected, aborting.')
def check_conflict(ctx, config):
"""
Note directory use conflicts and stale directories.
"""
log.info('Checking for old test directory...')
testdir = misc.get_testdir(ctx)
processes = ctx.cluster.run(
args=[
'test', '!', '-e', testdir,
],
wait=False,
)
failed = False
for proc in processes:
try:
proc.wait()
except run.CommandFailedError:
log.error('Host %s has stale test directory %s, check lock and cleanup.', proc.remote.shortname, testdir)
failed = True
if failed:
raise RuntimeError('Stale jobs detected, aborting.')
def fetch_binaries_for_coredumps(path, remote):
"""
Pul ELFs (debug and stripped) for each coredump found
"""
# Check for Coredumps:
coredump_path = os.path.join(path, 'coredump')
if os.path.isdir(coredump_path):
log.info('Transferring binaries for coredumps...')
for dump in os.listdir(coredump_path):
# Pull program from core file
dump_path = os.path.join(coredump_path, dump)
dump_info = subprocess.Popen(['file', dump_path],
stdout=subprocess.PIPE)
dump_out = dump_info.communicate()
# Parse file output to get program, Example output:
# 1422917770.7450.core: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, \
# from 'radosgw --rgw-socket-path /home/ubuntu/cephtest/apache/tmp.client.0/fastcgi_soc'
dump_program = dump_out.split("from '")[1].split(' ')[0]
# Find path on remote server:
r = remote.run(args=['which', dump_program], stdout=StringIO())
remote_path = r.stdout.getvalue()
# Pull remote program into coredump folder:
remote._sftp_get_file(remote_path, os.path.join(coredump_path,
dump_program))
# Pull Debug symbols:
debug_path = os.path.join('/usr/lib/debug', remote_path)
# RPM distro's append their non-stripped ELF's with .debug
# When deb based distro's do not.
if remote.system_type == 'rpm':
debug_path = '{debug_path}.debug'.format(debug_path=debug_path)
remote.get_file(debug_path, coredump_path)
@contextlib.contextmanager
def archive(ctx, config):
"""
Handle the creation and deletion of the archive directory.
"""
log.info('Creating archive directory...')
archive_dir = misc.get_archive_dir(ctx)
run.wait(
ctx.cluster.run(
args=[
'install', '-d', '-m0755', '--', archive_dir,
],
wait=False,
)
)
try:
yield
except Exception:
# we need to know this below
set_status(ctx.summary, 'fail')
raise
finally:
passed = get_status(ctx.summary) == 'pass'
if ctx.archive is not None and \
not (ctx.config.get('archive-on-error') and passed):
log.info('Transferring archived files...')
logdir = os.path.join(ctx.archive, 'remote')
if (not os.path.exists(logdir)):
os.mkdir(logdir)
for rem in ctx.cluster.remotes.iterkeys():
path = os.path.join(logdir, rem.shortname)
misc.pull_directory(rem, archive_dir, path)
# Check for coredumps and pull binaries
fetch_binaries_for_coredumps(path, rem)
log.info('Removing archive directory...')
run.wait(
ctx.cluster.run(
args=[
'rm',
'-rf',
'--',
archive_dir,
],
wait=False,
),
)
@contextlib.contextmanager
def sudo(ctx, config):
"""
Enable use of sudo
"""
log.info('Configuring sudo...')
sudoers_file = '/etc/sudoers'
backup_ext = '.orig.teuthology'
tty_expr = r's/^\([^#]*\) \(requiretty\)/\1 !\2/g'
pw_expr = r's/^\([^#]*\) !\(visiblepw\)/\1 \2/g'
run.wait(
ctx.cluster.run(
args="sudo sed -i{ext} -e '{tty}' -e '{pw}' {path}".format(
ext=backup_ext, tty=tty_expr, pw=pw_expr,
path=sudoers_file
),
wait=False,
)
)
try:
yield
finally:
log.info('Restoring {0}...'.format(sudoers_file))
ctx.cluster.run(
args="sudo mv -f {path}{ext} {path}".format(
path=sudoers_file, ext=backup_ext
)
)
@contextlib.contextmanager
def coredump(ctx, config):
"""
Stash a coredump of this system if an error occurs.
"""
log.info('Enabling coredump saving...')
archive_dir = misc.get_archive_dir(ctx)
run.wait(
ctx.cluster.run(
args=[
'install', '-d', '-m0755', '--',
'{adir}/coredump'.format(adir=archive_dir),
run.Raw('&&'),
'sudo', 'sysctl', '-w', 'kernel.core_pattern={adir}/coredump/%t.%p.core'.format(adir=archive_dir),
],
wait=False,
)
)
try:
yield
finally:
run.wait(
ctx.cluster.run(
args=[
'sudo', 'sysctl', '-w', 'kernel.core_pattern=core',
run.Raw('&&'),
# don't litter the archive dir if there were no cores dumped
'rmdir',
'--ignore-fail-on-non-empty',
'--',
'{adir}/coredump'.format(adir=archive_dir),
],
wait=False,
)
)
# set status = 'fail' if the dir is still there = coredumps were
# seen
for rem in ctx.cluster.remotes.iterkeys():
r = rem.run(
args=[
'if', 'test', '!', '-e', '{adir}/coredump'.format(adir=archive_dir), run.Raw(';'), 'then',
'echo', 'OK', run.Raw(';'),
'fi',
],
stdout=StringIO(),
)
if r.stdout.getvalue() != 'OK\n':
log.warning('Found coredumps on %s, flagging run as failed', rem)
set_status(ctx.summary, 'fail')
if 'failure_reason' not in ctx.summary:
ctx.summary['failure_reason'] = \
'Found coredumps on {rem}'.format(rem=rem)
@contextlib.contextmanager
def syslog(ctx, config):
"""
start syslog / stop syslog on exit.
"""
if ctx.archive is None:
# disable this whole feature if we're not going to archive the data anyway
yield
return
log.info('Starting syslog monitoring...')
archive_dir = misc.get_archive_dir(ctx)
run.wait(
ctx.cluster.run(
args=[
'mkdir', '-p', '-m0755', '--',
'{adir}/syslog'.format(adir=archive_dir),
],
wait=False,
)
)
CONF = '/etc/rsyslog.d/80-cephtest.conf'
conf_fp = StringIO('''
kern.* -{adir}/syslog/kern.log;RSYSLOG_FileFormat
*.*;kern.none -{adir}/syslog/misc.log;RSYSLOG_FileFormat
'''.format(adir=archive_dir))
try:
for rem in ctx.cluster.remotes.iterkeys():
misc.sudo_write_file(
remote=rem,
path=CONF,
data=conf_fp,
)
conf_fp.seek(0)
run.wait(
ctx.cluster.run(
args=[
'sudo',
'service',
# a mere reload (SIGHUP) doesn't seem to make
# rsyslog open the files
'rsyslog',
'restart',
],
wait=False,
),
)
yield
finally:
log.info('Shutting down syslog monitoring...')
run.wait(
ctx.cluster.run(
args=[
'sudo',
'rm',
'-f',
'--',
CONF,
run.Raw('&&'),
'sudo',
'service',
'rsyslog',
'restart',
],
wait=False,
),
)
# race condition: nothing actually says rsyslog had time to
# flush the file fully. oh well.
log.info('Checking logs for errors...')
for rem in ctx.cluster.remotes.iterkeys():
log.debug('Checking %s', rem.name)
r = rem.run(
args=[
'egrep', '--binary-files=text',
'\\bBUG\\b|\\bINFO\\b|\\bDEADLOCK\\b',
run.Raw('{adir}/syslog/*.log'.format(adir=archive_dir)),
run.Raw('|'),
'grep', '-v', 'task .* blocked for more than .* seconds',
run.Raw('|'),
'grep', '-v', 'lockdep is turned off',
run.Raw('|'),
'grep', '-v', 'trying to register non-static key',
run.Raw('|'),
'grep', '-v', 'DEBUG: fsize', # xfs_fsr
run.Raw('|'),
'grep', '-v', 'CRON', # ignore cron noise
run.Raw('|'),
'grep', '-v', 'BUG: bad unlock balance detected', # #6097
run.Raw('|'),
'grep', '-v', 'inconsistent lock state', # FIXME see #2523
run.Raw('|'),
'grep', '-v', '*** DEADLOCK ***', # part of lockdep output
run.Raw('|'),
'grep', '-v', 'INFO: possible irq lock inversion dependency detected', # FIXME see #2590 and #147
run.Raw('|'),
'grep', '-v', 'INFO: NMI handler (perf_event_nmi_handler) took too long to run',
run.Raw('|'),
'grep', '-v', 'INFO: recovery required on readonly',
run.Raw('|'),
'head', '-n', '1',
],
stdout=StringIO(),
)
stdout = r.stdout.getvalue()
if stdout != '':
log.error('Error in syslog on %s: %s', rem.name, stdout)
set_status(ctx.summary, 'fail')
if 'failure_reason' not in ctx.summary:
ctx.summary['failure_reason'] = \
"'{error}' in syslog".format(error=stdout)
log.info('Compressing syslogs...')
run.wait(
ctx.cluster.run(
args=[
'find',
'{adir}/syslog'.format(adir=archive_dir),
'-name',
'*.log',
'-print0',
run.Raw('|'),
'sudo',
'xargs',
'-0',
'--no-run-if-empty',
'--',
'gzip',
'--',
],
wait=False,
),
)
def vm_setup(ctx, config):
"""
Look for virtual machines and handle their initialization
"""
all_tasks = [x.keys()[0] for x in ctx.config['tasks']]
need_chef = False
if 'chef' in all_tasks or 'kernel' in all_tasks:
need_chef = True
with parallel() as p:
editinfo = os.path.join(os.path.dirname(__file__),'edit_sudoers.sh')
for rem in ctx.cluster.remotes.iterkeys():
mname = rem.shortname
if misc.is_vm(mname):
r = rem.run(args=['test', '-e', '/ceph-qa-ready',],
stdout=StringIO(),
check_status=False,)
if r.returncode != 0:
p1 = subprocess.Popen(['cat', editinfo], stdout=subprocess.PIPE)
p2 = subprocess.Popen(
[
'ssh',
'-o', 'StrictHostKeyChecking=no',
'-t', '-t',
str(rem),
'sudo',
'sh'
],
stdin=p1.stdout, stdout=subprocess.PIPE
)
_, err = p2.communicate()
if err:
log.info("Edit of /etc/sudoers failed: %s", err)
if need_chef:
p.spawn(_download_and_run_chef, rem)
def _download_and_run_chef(remote_):
"""
Run ceph_qa_chef.
"""
log.info('Running ceph_qa_chef on %s', remote_)
remote_.run(
args=[
'wget', '-q', '-O-',
'http://git.ceph.com/?p=ceph-qa-chef.git;a=blob_plain;f=solo/solo-from-scratch;hb=HEAD',
run.Raw('|'),
'sh',
],
label="run chef solo-from-scratch"
)
|
[
{
"type": "go_to",
"params": {
"line": 20,
"column": 52,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 20,
"column": 52
},
"end": {
"line": 20,
"column": 52
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ", get_install_task_flavor",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 222,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 222,
"column": 0
},
"end": {
"line": 223,
"column": 40
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " sha1 = ctx.config.get(\"",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "sha1\")\n os_type ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "= ctx.config.get(\"os_type\")\n flavo",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "r = get_in",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 224,
"column": 19,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "stall_task_flavor(ctx",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ".config)\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 227,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " template = \"",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "Checking packages for os_type,'{os",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 227,
"column": 47,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 227,
"column": 54,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "}' flavor '{flav}' and\" \\\n",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " \" ceph hash '{ver}",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "'\"\n",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
},
{
"type": "go_to",
"params": {
"line": 229,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 228,
"column": 12,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 228,
"column": 12
},
"end": {
"line": 228,
"column": 72
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "template",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 230,
"column": 0,
"text": null,
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " ",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": " flav=fl",
"start": null,
"end": null
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": "avor,\n",
"start": null,
"end": null
}
},
{
"type": "go_to",
"params": {
"line": 233,
"column": 52,
"text": null,
"start": null,
"end": null
}
},
{
"type": "select",
"params": {
"line": null,
"column": null,
"text": null,
"start": {
"line": 233,
"column": 52
},
"end": {
"line": 233,
"column": 52
}
}
},
{
"type": "insert",
"params": {
"line": null,
"column": null,
"text": ", flavor",
"start": null,
"end": null
}
},
{
"type": "save",
"params": null
}
] |
python
|
{
"dataset": "bigcode/commitpack",
"split": "train",
"mode": "file",
"repo": "caibo2014/teuthology,SUSE/teuthology,dreamhost/teuthology,ivotron/teuthology,caibo2014/teuthology,SUSE/teuthology,ivotron/teuthology,t-miyamae/teuthology,dreamhost/teuthology,dmick/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,robbat2/teuthology,ceph/teuthology,dmick/teuthology,dmick/teuthology,ktdreyer/teuthology,zhouyuan/teuthology,robbat2/teuthology,SUSE/teuthology,michaelsevilla/teuthology,michaelsevilla/teuthology,ceph/teuthology,t-miyamae/teuthology",
"status": "M",
"commit": "485d23f17c080de2a6487698236633ceedf6d7db",
"parent": null,
"path": "teuthology/task/internal.py",
"old_path": "teuthology/task/internal.py",
"hunk_index": null,
"adapter_meta": {
"commit": {
"message": "Don't assume 'basic' flavor in check_packages()\n\nSigned-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>\n",
"author_date": null,
"committer_date": null,
"stats": null,
"dataset_context": {
"dataset": "bigcode/commitpack",
"split": "train"
}
},
"file": {}
}
}
|
"# -*- coding: utf-8 -*-\n#\n# pylast -\n# A Python interface to Last.fm and Libre.fm\n#\n# Copy(...TRUNCATED)
| [{"type":"go_to","params":{"line":733,"column":0,"text":null,"start":null,"end":null}},{"type":"sele(...TRUNCATED)
|
python
| {"dataset":"bigcode/commitpack","split":"train","mode":"file","repo":"pylast/pylast,hugovk/pylast","(...TRUNCATED)
|
"#!/usr/bin/python\n# -*- coding: ascii -*-\n#######################################################(...TRUNCATED)
| [{"type":"go_to","params":{"line":0,"column":0,"text":null,"start":null,"end":null}},{"type":"select(...TRUNCATED)
|
python
| {"dataset":"bigcode/commitpack","split":"train","mode":"file","repo":"masayuko/beaker,enomado/beaker(...TRUNCATED)
|
"import argparse\nimport os\nimport subprocess\nimport sys\nimport pty\nimport multiprocessing\n\nfr(...TRUNCATED)
| [{"type":"go_to","params":{"line":0,"column":0,"text":null,"start":null,"end":null}},{"type":"insert(...TRUNCATED)
|
python
| {"dataset":"bigcode/commitpack","split":"train","mode":"file","repo":"mikanbako/sphinx-autobuild,Sta(...TRUNCATED)
|
"#!/usr/bin/env python\n\n\"\"\"Run some linters on files of various types.\"\"\"\n\n\nUSAGE = \"\"\(...TRUNCATED)
| [{"type":"go_to","params":{"line":39,"column":0,"text":null,"start":null,"end":null}},{"type":"inser(...TRUNCATED)
|
python
| {"dataset":"bigcode/commitpack","split":"train","mode":"file","repo":"Khan/khan-linter,Khan/khan-lin(...TRUNCATED)
|
End of preview.
No dataset card yet
- Downloads last month
- -