Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
MMCV
Commits
eadd1e0f
Commit
eadd1e0f
authored
Aug 27, 2018
by
Kai Chen
Browse files
re-organize io module as a subpackage
parent
5f2e58c5
Changes
10
Hide whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
265 additions
and
1 deletion
+265
-1
mmcv/__init__.py
mmcv/__init__.py
+1
-1
mmcv/fileio/__init__.py
mmcv/fileio/__init__.py
+3
-0
mmcv/fileio/io.py
mmcv/fileio/io.py
+76
-0
mmcv/fileio/parse.py
mmcv/fileio/parse.py
+50
-0
mmcv/fileio/processors/__init__.py
mmcv/fileio/processors/__init__.py
+4
-0
mmcv/fileio/processors/base.py
mmcv/fileio/processors/base.py
+31
-0
mmcv/fileio/processors/json.py
mmcv/fileio/processors/json.py
+29
-0
mmcv/fileio/processors/pickle.py
mmcv/fileio/processors/pickle.py
+33
-0
mmcv/fileio/processors/yaml.py
mmcv/fileio/processors/yaml.py
+38
-0
tests/test_fileio.py
tests/test_fileio.py
+0
-0
No files found.
mmcv/__init__.py
View file @
eadd1e0f
from
.utils
import
*
from
.utils
import
*
from
.io
import
*
from
.
file
io
import
*
from
.opencv_info
import
*
from
.opencv_info
import
*
from
.image
import
*
from
.image
import
*
from
.video
import
*
from
.video
import
*
...
...
mmcv/fileio/__init__.py
0 → 100644
View file @
eadd1e0f
from
.io
import
*
from
.processors
import
*
from
.parse
import
*
mmcv/io.py
→
mmcv/
fileio/
io.py
View file @
eadd1e0f
from
abc
import
ABCMeta
,
abstractmethod
from
.processors
import
JsonProcessor
,
PickleProcessor
,
YamlProcessor
import
json
from
..utils
import
is_str
import
yaml
from
six.moves
import
cPickle
as
pickle
try
:
from
yaml
import
CLoader
as
Loader
,
CDumper
as
Dumper
except
ImportError
:
from
yaml
import
Loader
,
Dumper
from
mmcv.utils
import
is_str
__all__
=
[
'load'
,
'dump'
,
'list_from_file'
,
'dict_from_file'
]
class BaseFileProcessor(object):
    """Abstract interface for per-format file processors.

    Concrete subclasses (json/yaml/pickle) implement five static methods
    covering load/dump from either a filesystem path or an open file object.

    NOTE(review): ``__metaclass__`` only takes effect on Python 2; on
    Python 3 this class is not actually abstract and the stubs below
    silently return ``None`` if called -- confirm the intended Python
    versions before relying on abstract enforcement.
    """

    __metaclass__ = ABCMeta

    # Deserialize the file at `filepath` and return the parsed object.
    @staticmethod
    @abstractmethod
    def load_from_path(filepath, **kwargs):
        pass

    # Deserialize from an already-open file object.
    @staticmethod
    @abstractmethod
    def load_from_fileobj(file, **kwargs):
        pass

    # Serialize `obj` and return the result as a string (or bytes).
    @staticmethod
    @abstractmethod
    def dump_to_str(obj, **kwargs):
        pass

    # Serialize `obj` and write it to the file at `filepath`.
    @staticmethod
    @abstractmethod
    def dump_to_path(obj, filepath, **kwargs):
        pass

    # Serialize `obj` and write it to an already-open file object.
    @staticmethod
    @abstractmethod
    def dump_to_fileobj(obj, file, **kwargs):
        pass
class JsonProcessor(BaseFileProcessor):
    """File processor for the JSON format, backed by the stdlib ``json``.

    All methods are static; extra keyword arguments are forwarded to the
    underlying ``json.load``/``json.dump``/``json.dumps`` calls.
    """

    @staticmethod
    def load_from_path(filepath, **kwargs):
        """Parse the JSON file at ``filepath`` and return the object.

        Fix: accept ``**kwargs`` like the base interface and the sibling
        yaml/pickle processors, so callers can pass loader options such as
        ``object_hook`` (backward-compatible -- existing calls pass none).
        """
        with open(filepath, 'r') as f:
            obj = json.load(f, **kwargs)
        return obj

    @staticmethod
    def load_from_fileobj(file, **kwargs):
        """Parse JSON from an open file object and return the object."""
        return json.load(file, **kwargs)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        """Serialize ``obj`` to a JSON string."""
        return json.dumps(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        """Serialize ``obj`` as JSON into the file at ``filepath``."""
        with open(filepath, 'w') as f:
            json.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        """Serialize ``obj`` as JSON into an open file object."""
        json.dump(obj, file, **kwargs)
class YamlProcessor(BaseFileProcessor):
    """File processor for the YAML format, backed by PyYAML.

    Defaults to the C-accelerated Loader/Dumper when available (see the
    try/except import above), falling back to the pure-Python ones.

    NOTE(review): ``yaml.load`` with the full Loader can construct
    arbitrary Python objects; only use these methods on trusted input.
    """

    @staticmethod
    def load_from_path(filepath, **kwargs):
        # Default to the fast Loader unless the caller overrides it.
        kwargs.setdefault('Loader', Loader)
        with open(filepath, 'r') as f:
            obj = yaml.load(f, **kwargs)
        return obj

    @staticmethod
    def load_from_fileobj(file, **kwargs):
        kwargs.setdefault('Loader', Loader)
        return yaml.load(file, **kwargs)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        # yaml.dump with no stream argument returns the produced string.
        kwargs.setdefault('Dumper', Dumper)
        return yaml.dump(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        kwargs.setdefault('Dumper', Dumper)
        with open(filepath, 'w') as f:
            yaml.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        kwargs.setdefault('Dumper', Dumper)
        yaml.dump(obj, file, **kwargs)
class PickleProcessor(BaseFileProcessor):
    """File processor for the pickle format.

    Uses cPickle via six on Python 2 (see the import above). Dumps default
    to pickle protocol 2 so the output stays loadable from Python 2.

    NOTE(review): unpickling executes arbitrary code; only load trusted
    files with this processor.
    """

    @staticmethod
    def load_from_path(filepath, **kwargs):
        # Pickle is a binary format, hence 'rb'.
        with open(filepath, 'rb') as f:
            obj = pickle.load(f, **kwargs)
        return obj

    @staticmethod
    def load_from_fileobj(file, **kwargs):
        return pickle.load(file, **kwargs)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        # Protocol 2 keeps the dump readable by both Python 2 and 3.
        kwargs.setdefault('protocol', 2)
        return pickle.dumps(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        kwargs.setdefault('protocol', 2)
        with open(filepath, 'wb') as f:
            pickle.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        kwargs.setdefault('protocol', 2)
        pickle.dump(obj, file, **kwargs)
file_processors
=
{
file_processors
=
{
'json'
:
JsonProcessor
,
'json'
:
JsonProcessor
,
...
@@ -202,55 +74,3 @@ def dump(obj, file=None, file_format=None, **kwargs):
...
@@ -202,55 +74,3 @@ def dump(obj, file=None, file_format=None, **kwargs):
processor
.
dump_to_fileobj
(
obj
,
file
,
**
kwargs
)
processor
.
dump_to_fileobj
(
obj
,
file
,
**
kwargs
)
else
:
else
:
raise
TypeError
(
'"file" must be a filename str or a file-object'
)
raise
TypeError
(
'"file" must be a filename str or a file-object'
)
def list_from_file(filename, prefix='', offset=0, max_num=0):
    """Load a text file and parse the content as a list of strings.

    Args:
        filename (str): Filename.
        prefix (str): The prefix to be inserted to the beginning of each item.
        offset (int): The offset of lines (skipped before reading).
        max_num (int): The maximum number of lines to be read,
            zeros and negatives mean no limitation.

    Returns:
        list[str]: A list of strings.
    """
    items = []
    with open(filename, 'r') as f:
        # Skip the first `offset` lines before collecting.
        for _ in range(offset):
            f.readline()
        for line in f:
            # A positive max_num caps how many lines are collected.
            if 0 < max_num <= len(items):
                break
            items.append(prefix + line.rstrip('\n'))
    return items
def dict_from_file(filename, key_type=str):
    """Load a text file and parse the content as a dict.

    Each line of the text file will be two or more columns split by
    whitespaces or tabs. The first column will be parsed as dict keys, and
    the following columns will be parsed as dict values.

    Args:
        filename (str): Filename.
        key_type (type): Type of the dict's keys. str is used by default and
            type conversion will be performed if specified.

    Returns:
        dict: The parsed contents.
    """
    mapping = {}
    with open(filename, 'r') as f:
        for line in f:
            parts = line.rstrip('\n').split()
            # Every line must carry at least one key and one value.
            assert len(parts) >= 2
            # A single value is stored bare; multiple values as a list.
            value = parts[1] if len(parts) == 2 else parts[1:]
            mapping[key_type(parts[0])] = value
    return mapping
mmcv/fileio/parse.py
0 → 100644
View file @
eadd1e0f
def list_from_file(filename, prefix='', offset=0, max_num=0):
    """Load a text file and parse the content as a list of strings.

    Args:
        filename (str): Filename.
        prefix (str): The prefix to be inserted to the beginning of each item.
        offset (int): The offset of lines.
        max_num (int): The maximum number of lines to be read,
            zeros and negatives mean no limitation.

    Returns:
        list[str]: A list of strings.
    """
    cnt = 0
    item_list = []
    with open(filename, 'r') as f:
        # Skip the first `offset` lines.
        for _ in range(offset):
            f.readline()
        for line in f:
            # A positive max_num caps how many lines are read.
            if max_num > 0 and cnt >= max_num:
                break
            item_list.append(prefix + line.rstrip('\n'))
            cnt += 1
    return item_list
def dict_from_file(filename, key_type=str):
    """Load a text file and parse the content as a dict.

    Each line of the text file will be two or more columns split by
    whitespaces or tabs. The first column will be parsed as dict keys, and
    the following columns will be parsed as dict values.

    Args:
        filename (str): Filename.
        key_type (type): Type of the dict's keys. str is used by default and
            type conversion will be performed if specified.

    Returns:
        dict: The parsed contents.
    """
    mapping = {}
    with open(filename, 'r') as f:
        for line in f:
            items = line.rstrip('\n').split()
            # Every line must have at least a key and one value.
            assert len(items) >= 2
            key = key_type(items[0])
            # Keep a bare value for two columns, a list for more.
            val = items[1:] if len(items) > 2 else items[1]
            mapping[key] = val
    return mapping
mmcv/fileio/processors/__init__.py
0 → 100644
View file @
eadd1e0f
from
.base
import
BaseFileProcessor
from
.json
import
JsonProcessor
from
.pickle
import
PickleProcessor
from
.yaml
import
YamlProcessor
mmcv/fileio/processors/base.py
0 → 100644
View file @
eadd1e0f
from
abc
import
ABCMeta
,
abstractmethod
class BaseFileProcessor(object):
    """Abstract interface for per-format file processors.

    Concrete subclasses (json/yaml/pickle) implement five static methods
    covering load/dump from either a filesystem path or an open file object.

    NOTE(review): ``__metaclass__`` only takes effect on Python 2; on
    Python 3 this class is not actually abstract and the stubs below
    silently return ``None`` if called -- confirm the intended Python
    versions before relying on abstract enforcement.
    """

    __metaclass__ = ABCMeta

    # Deserialize the file at `filepath` and return the parsed object.
    @staticmethod
    @abstractmethod
    def load_from_path(filepath, **kwargs):
        pass

    # Deserialize from an already-open file object.
    @staticmethod
    @abstractmethod
    def load_from_fileobj(file, **kwargs):
        pass

    # Serialize `obj` and return the result as a string (or bytes).
    @staticmethod
    @abstractmethod
    def dump_to_str(obj, **kwargs):
        pass

    # Serialize `obj` and write it to the file at `filepath`.
    @staticmethod
    @abstractmethod
    def dump_to_path(obj, filepath, **kwargs):
        pass

    # Serialize `obj` and write it to an already-open file object.
    @staticmethod
    @abstractmethod
    def dump_to_fileobj(obj, file, **kwargs):
        pass
mmcv/fileio/processors/json.py
0 → 100644
View file @
eadd1e0f
import
json
from
.base
import
BaseFileProcessor
class JsonProcessor(BaseFileProcessor):
    """JSON implementation of the file-processor interface.

    Wraps the stdlib ``json`` module; ``dump*`` methods forward extra
    keyword arguments to ``json.dump``/``json.dumps``.
    """

    @staticmethod
    def load_from_path(filepath):
        """Read and parse the JSON file at ``filepath``."""
        with open(filepath, 'r') as f:
            return json.load(f)

    @staticmethod
    def load_from_fileobj(file):
        """Parse JSON from an open file object."""
        return json.load(file)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        """Serialize ``obj`` to a JSON string."""
        return json.dumps(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        """Write ``obj`` as JSON into the file at ``filepath``."""
        with open(filepath, 'w') as f:
            json.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        """Write ``obj`` as JSON into an open file object."""
        json.dump(obj, file, **kwargs)
mmcv/fileio/processors/pickle.py
0 → 100644
View file @
eadd1e0f
from
six.moves
import
cPickle
as
pickle
from
.base
import
BaseFileProcessor
class PickleProcessor(BaseFileProcessor):
    """Pickle implementation of the file-processor interface.

    Dumps default to pickle protocol 2 so the output stays loadable from
    Python 2. NOTE(review): unpickling executes arbitrary code; only load
    trusted files.
    """

    @staticmethod
    def load_from_path(filepath, **kwargs):
        """Unpickle the (binary) file at ``filepath``."""
        with open(filepath, 'rb') as f:
            return pickle.load(f, **kwargs)

    @staticmethod
    def load_from_fileobj(file, **kwargs):
        """Unpickle from an open binary file object."""
        return pickle.load(file, **kwargs)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        """Pickle ``obj`` and return the result (protocol 2 by default)."""
        kwargs.setdefault('protocol', 2)
        return pickle.dumps(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        """Pickle ``obj`` into the file at ``filepath``."""
        kwargs.setdefault('protocol', 2)
        with open(filepath, 'wb') as f:
            pickle.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        """Pickle ``obj`` into an open binary file object."""
        kwargs.setdefault('protocol', 2)
        pickle.dump(obj, file, **kwargs)
\ No newline at end of file
mmcv/fileio/processors/yaml.py
0 → 100644
View file @
eadd1e0f
import
yaml
try
:
from
yaml
import
CLoader
as
Loader
,
CDumper
as
Dumper
except
ImportError
:
from
yaml
import
Loader
,
Dumper
from
.base
import
BaseFileProcessor
class YamlProcessor(BaseFileProcessor):
    """YAML implementation of the file-processor interface.

    Prefers the C-accelerated ``Loader``/``Dumper`` selected by the
    try/except import above. NOTE(review): ``yaml.load`` with the full
    Loader can construct arbitrary objects; use only on trusted input.
    """

    @staticmethod
    def load_from_path(filepath, **kwargs):
        """Parse the YAML file at ``filepath``."""
        kwargs.setdefault('Loader', Loader)
        with open(filepath, 'r') as f:
            return yaml.load(f, **kwargs)

    @staticmethod
    def load_from_fileobj(file, **kwargs):
        """Parse YAML from an open file object."""
        kwargs.setdefault('Loader', Loader)
        return yaml.load(file, **kwargs)

    @staticmethod
    def dump_to_str(obj, **kwargs):
        """Serialize ``obj`` to a YAML string."""
        kwargs.setdefault('Dumper', Dumper)
        return yaml.dump(obj, **kwargs)

    @staticmethod
    def dump_to_path(obj, filepath, **kwargs):
        """Write ``obj`` as YAML into the file at ``filepath``."""
        kwargs.setdefault('Dumper', Dumper)
        with open(filepath, 'w') as f:
            yaml.dump(obj, f, **kwargs)

    @staticmethod
    def dump_to_fileobj(obj, file, **kwargs):
        """Write ``obj`` as YAML into an open file object."""
        kwargs.setdefault('Dumper', Dumper)
        yaml.dump(obj, file, **kwargs)
tests/test_io.py
→
tests/test_
file
io.py
View file @
eadd1e0f
File moved
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment