Commit 70a8a9e0 authored by wangwei990215's avatar wangwei990215
Browse files

initial commit

parents
Pipeline #1738 failed with stages
in 0 seconds
import pynini
from fun_text_processing.inverse_text_normalization.fr.graph_utils import DAMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for verbalizing telephone, e.g.
    telephone { number_part: "02 33 43 53 22" }
    -> 02 33 43 53 22
    """

    def __init__(self):
        super().__init__(name="telephone", kind="verbalize")
        # Keep everything between the quotes verbatim; strip the field wrapper.
        quoted_digits = pynini.closure(DAMO_NOT_QUOTE, 1)
        number_part = pynutil.delete('number_part: "') + quoted_digits + pynutil.delete('"')
        self.fst = self.delete_tokens(number_part).optimize()
import pynini
from fun_text_processing.inverse_text_normalization.fr.graph_utils import (
DAMO_DIGIT,
GraphFst,
delete_extra_space,
delete_space,
)
from fun_text_processing.inverse_text_normalization.fr.utils import get_abs_path
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for verbalizing time, e.g.
    time { hours: "8" minutes: "30" suffix: "du matin"} -> 8 h 30
    time { hours: "8" minutes: "30" } -> 8 h 30
    time { hours: "8" minutes: "30" suffix: "du soir"} -> 20 h 30
    """

    def __init__(self):
        super().__init__(name="time", kind="verbalize")
        # 12-hour -> 24-hour evening mapping (e.g. "8" -> "20"), loaded from TSV.
        hour_to_night = pynini.string_file(get_abs_path("data/time/hour_to_night.tsv"))
        # The suffix fields are deleted from the output; they only select which
        # branch of the grammar below applies.
        day_suffixes = pynutil.delete('suffix: "am"')
        night_suffixes = pynutil.delete('suffix: "pm"')
        # Accept 1-2 digit hour and minute values, stripping field name and quotes.
        hour = (
            pynutil.delete("hours:")
            + delete_space
            + pynutil.delete('"')
            + pynini.closure(DAMO_DIGIT, 1, 2)
            + pynutil.delete('"')
        )
        minute = (
            pynutil.delete("minutes:")
            + delete_extra_space
            + pynutil.delete('"')
            + pynini.closure(DAMO_DIGIT, 1, 2)
            + pynutil.delete('"')
        )
        # Daytime (or unsuffixed) branch: hour is kept as-is; minutes and the
        # "am" suffix are both optional.
        graph = (
            hour
            + delete_extra_space
            + pynutil.insert("h")
            + minute.ques
            + delete_space
            + day_suffixes.ques
        )
        # Evening branch: the hour is composed with hour_to_night to shift it
        # to the 24-hour clock; the "pm" suffix is mandatory here.
        graph |= (
            hour @ hour_to_night
            + delete_extra_space
            + pynutil.insert("h")
            + minute.ques
            + delete_space
            + night_suffixes
        )
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
from fun_text_processing.inverse_text_normalization.fr.graph_utils import GraphFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.cardinal import CardinalFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.date import DateFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.decimal import DecimalFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.electronic import ElectronicFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.fraction import FractionFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.measure import MeasureFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.money import MoneyFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.ordinal import OrdinalFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.telephone import TelephoneFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.time import TimeFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.whitelist import WhiteListFst
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to an OpenFst Finite State Archive (FAR) file.
    More details on deployment at NeMo/tools/text_processing_deployment.
    """

    def __init__(self):
        super().__init__(name="verbalize", kind="verbalize")
        # cardinal, decimal and fraction instances are kept because MeasureFst
        # and MoneyFst reuse their grammars.
        cardinal = CardinalFst()
        cardinal_graph = cardinal.fst
        ordinal_graph = OrdinalFst().fst
        decimal = DecimalFst()
        decimal_graph = decimal.fst
        fraction = FractionFst()
        fraction_graph = fraction.fst
        measure_graph = MeasureFst(decimal=decimal, cardinal=cardinal, fraction=fraction).fst
        money_graph = MoneyFst(decimal=decimal).fst
        time_graph = TimeFst().fst
        date_graph = DateFst().fst
        whitelist_graph = WhiteListFst().fst
        telephone_graph = TelephoneFst().fst
        electronic_graph = ElectronicFst().fst
        # Union of all class verbalizers; any matching branch may verbalize a token.
        graph = (
            time_graph
            | date_graph
            | money_graph
            | measure_graph
            | fraction_graph
            | ordinal_graph
            | decimal_graph
            | cardinal_graph
            | whitelist_graph
            | telephone_graph
            | electronic_graph
        )
        self.fst = graph
import pynini
from fun_text_processing.inverse_text_normalization.fr.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
)
from fun_text_processing.inverse_text_normalization.fr.verbalizers.verbalize import VerbalizeFst
from fun_text_processing.inverse_text_normalization.fr.verbalizers.word import WordFst
from pynini.lib import pynutil
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes an entire sentence, e.g.
    tokens { name: "its" } tokens { time { hours: "12" minutes: "30" } } tokens { name: "now" } -> its 12:30 now
    """

    def __init__(self):
        super().__init__(name="verbalize_final", kind="verbalize")
        verbalize = VerbalizeFst().fst
        word = WordFst().fst
        # A token is verbalized either by a semiotic-class grammar or as a plain word.
        types = verbalize | word
        # Strip the `tokens { ... }` wrapper around each token.
        graph = (
            pynutil.delete("tokens")
            + delete_space
            + pynutil.delete("{")
            + delete_space
            + types
            + delete_space
            + pynutil.delete("}")
        )
        # A sentence is one or more tokens; leading/trailing spaces are dropped
        # and consecutive tokens are joined by exactly one space.
        graph = delete_space + pynini.closure(graph + delete_extra_space) + graph + delete_space
        self.fst = graph
import pynini
from fun_text_processing.inverse_text_normalization.fr.graph_utils import (
DAMO_CHAR,
DAMO_SIGMA,
GraphFst,
delete_space,
)
from pynini.lib import pynutil
class WhiteListFst(GraphFst):
    """
    Finite state transducer for verbalizing whitelist
    e.g. tokens { name: "mrs." } -> mrs.
    """

    def __init__(self):
        super().__init__(name="whitelist", kind="verbalize")
        # Accept any run of non-space characters between the quotes.
        unquoted = pynini.closure(DAMO_CHAR - " ", 1)
        graph = (
            pynutil.delete("name:")
            + delete_space
            + pynutil.delete('"')
            + unquoted
            + pynutil.delete('"')
        )
        # Render non-breaking spaces as ordinary spaces in the output.
        nbsp_to_space = pynini.cdrewrite(pynini.cross("\u00A0", " "), "", "", DAMO_SIGMA)
        self.fst = (graph @ nbsp_to_space).optimize()
import pynini
from fun_text_processing.inverse_text_normalization.fr.graph_utils import (
DAMO_CHAR,
DAMO_SIGMA,
GraphFst,
delete_space,
)
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for verbalizing plain tokens
    e.g. tokens { name: "sleep" } -> sleep
    """

    def __init__(self):
        super().__init__(name="word", kind="verbalize")
        # One or more non-space characters form the token body.
        body = pynini.closure(DAMO_CHAR - " ", 1)
        stripped = (
            pynutil.delete("name:")
            + delete_space
            + pynutil.delete('"')
            + body
            + pynutil.delete('"')
        )
        # Convert non-breaking spaces to regular spaces before emitting.
        nbsp_to_space = pynini.cdrewrite(pynini.cross("\u00A0", " "), "", "", DAMO_SIGMA)
        self.fst = (stripped @ nbsp_to_space).optimize()
from fun_text_processing.inverse_text_normalization.id.taggers.tokenize_and_classify import (
ClassifyFst,
)
from fun_text_processing.inverse_text_normalization.id.verbalizers.verbalize import VerbalizeFst
from fun_text_processing.inverse_text_normalization.id.verbalizers.verbalize_final import (
VerbalizeFinalFst,
)
from argparse import ArgumentParser
from typing import List
import regex as re
from fun_text_processing.text_normalization.data_loader_utils import (
EOS_TYPE,
Instance,
load_files,
training_data_to_sentences,
)
"""
This file is for evaluation purposes.
filter_loaded_data() cleans data (list of instances) for inverse text normalization. Filters and cleaners can be specified for each semiotic class individually.
For example, normalized text should only include characters and whitespace characters but no punctuation.
Cardinal unnormalized instances should contain at least one integer and all other characters are removed.
"""
class Filter:
    """
    Pairs a semiotic class with its filtering and processing callbacks.

    Args:
        class_type: semiotic class used in dataset
        process_func: function to transform text
        filter_func: function to filter text
    """

    def __init__(self, class_type: str, process_func: object, filter_func: object):
        self.class_type = class_type
        self.process_func = process_func
        self.filter_func = filter_func

    def filter(self, instance: Instance) -> bool:
        """
        Apply the filter callback to the given instance.

        Returns: True if the instance fulfills the criteria or does not
        belong to this filter's class type.
        """
        if instance.token_type == self.class_type:
            return self.filter_func(instance)
        return True

    def process(self, instance: Instance) -> Instance:
        """
        Apply the process callback to the given instance.

        Returns: the processed instance when it belongs to this filter's
        class type, otherwise the original instance unchanged.
        """
        if instance.token_type == self.class_type:
            return self.process_func(instance)
        return instance
def filter_cardinal_1(instance: Instance) -> bool:
    """Keep cardinal instances whose unnormalized form contains at least one digit."""
    # re.search returns Match/None; coerce to a real bool to honor the annotation.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_cardinal_1(instance: Instance) -> Instance:
    """Strip non-digits from the unnormalized side and keep only lowercase
    letters/spaces on the normalized side."""
    un_normalized = re.sub(r"[^0-9]", "", instance.un_normalized)
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_ordinal_1(instance: Instance) -> bool:
    """Keep ordinal instances whose unnormalized form ends in an ordinal suffix."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"(st|nd|rd|th)\s*$", instance.un_normalized) is not None


def process_ordinal_1(instance: Instance) -> Instance:
    """Drop commas/whitespace from the unnormalized side; keep lowercase
    letters/spaces on the normalized side."""
    un_normalized = re.sub(r"[,\s]", "", instance.un_normalized)
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_decimal_1(instance: Instance) -> bool:
    """Keep decimal instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_decimal_1(instance: Instance) -> Instance:
    """Drop thousands separators from the unnormalized side; keep lowercase
    letters/spaces on the normalized side."""
    un_normalized = re.sub(r",", "", instance.un_normalized)
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_measure_1(instance: Instance) -> bool:
    """Accept every measure instance."""
    return True


def process_measure_1(instance: Instance) -> Instance:
    """Normalize measure spellings: drop commas, fix m2, separate digits from
    units, and reduce the normalized side to lowercase letters and spaces."""
    un_normalized = instance.un_normalized
    un_normalized = re.sub(r",", "", un_normalized)
    un_normalized = re.sub(r"m2", "m²", un_normalized)
    # Insert a space between a digit and a following unit character.
    un_normalized = re.sub(r"(\d)([^\d.\s])", r"\1 \2", un_normalized)
    normalized = instance.normalized
    normalized = re.sub(r"[^a-z\s]", "", normalized)
    # Undo the plural on "per <unit>s" phrases.
    normalized = re.sub(r"per ([a-z\s]*)s$", r"per \1", normalized)
    normalized = re.sub(r"[^a-z ]", "", normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_money_1(instance: Instance) -> bool:
    """Keep money instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_money_1(instance: Instance) -> Instance:
    """Canonicalize currency symbols and expand trailing magnitude suffixes
    (m -> juta, b/bn -> milyar); reduce the normalized side to lowercase
    letters and spaces."""
    un_normalized = instance.un_normalized
    un_normalized = re.sub(r",", "", un_normalized)
    un_normalized = re.sub(r"a\$", r"$", un_normalized)
    un_normalized = re.sub(r"us\$", r"$", un_normalized)
    un_normalized = re.sub(r"(\d)m\s*$", r"\1 juta", un_normalized)
    un_normalized = re.sub(r"(\d)bn?\s*$", r"\1 milyar", un_normalized)
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_time_1(instance: Instance) -> bool:
    """Keep time instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_time_1(instance: Instance) -> Instance:
    """Tighten colon spacing and canonicalize a.m./p.m. markers; reduce the
    normalized side to lowercase letters and spaces."""
    un_normalized = instance.un_normalized
    un_normalized = re.sub(r": ", ":", un_normalized)
    un_normalized = re.sub(r"(\d)\s?a\s?m\s?", r"\1 a.m.", un_normalized)
    un_normalized = re.sub(r"(\d)\s?p\s?m\s?", r"\1 p.m.", un_normalized)
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=un_normalized, normalized=normalized
    )
def filter_plain_1(instance: Instance) -> bool:
    """Accept every plain-token instance."""
    return True


def process_plain_1(instance: Instance) -> Instance:
    """Return an equivalent copy of the plain-token instance, unchanged."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=instance.un_normalized,
        normalized=instance.normalized,
    )
def filter_punct_1(instance: Instance) -> bool:
    """Accept every punctuation instance."""
    return True


def process_punct_1(instance: Instance) -> Instance:
    """Return an equivalent copy of the punctuation instance, unchanged."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=instance.un_normalized,
        normalized=instance.normalized,
    )
def filter_date_1(instance: Instance) -> bool:
    """Accept every date instance."""
    return True


def process_date_1(instance: Instance) -> Instance:
    """Drop commas from the unnormalized side; keep only lowercase letters
    and spaces on the normalized side."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=re.sub(r",", "", instance.un_normalized),
        normalized=re.sub(r"[^a-z ]", "", instance.normalized),
    )
def filter_letters_1(instance: Instance) -> bool:
    """Accept every letters instance."""
    return True


def process_letters_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=instance.un_normalized,
        normalized=re.sub(r"[^a-z ]", "", instance.normalized),
    )
def filter_verbatim_1(instance: Instance) -> bool:
    """Accept every verbatim instance."""
    return True


def process_verbatim_1(instance: Instance) -> Instance:
    """Return an equivalent copy of the verbatim instance, unchanged."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=instance.un_normalized,
        normalized=instance.normalized,
    )
def filter_digit_1(instance: Instance) -> bool:
    """Keep digit instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_digit_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=instance.un_normalized, normalized=normalized
    )
def filter_telephone_1(instance: Instance) -> bool:
    """Keep telephone instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_telephone_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=instance.un_normalized, normalized=normalized
    )
def filter_electronic_1(instance: Instance) -> bool:
    """Keep electronic instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_electronic_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=instance.un_normalized, normalized=normalized
    )
def filter_fraction_1(instance: Instance) -> bool:
    """Keep fraction instances whose unnormalized form contains at least one digit."""
    # Coerce the Match/None result to bool to honor the annotated return type.
    return re.search(r"[0-9]", instance.un_normalized) is not None


def process_fraction_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    normalized = re.sub(r"[^a-z ]", "", instance.normalized)
    return Instance(
        token_type=instance.token_type, un_normalized=instance.un_normalized, normalized=normalized
    )
def filter_address_1(instance: Instance) -> bool:
    """Accept every address instance."""
    return True


def process_address_1(instance: Instance) -> Instance:
    """Keep only lowercase letters and spaces on the normalized side."""
    return Instance(
        token_type=instance.token_type,
        un_normalized=instance.un_normalized,
        normalized=re.sub(r"[^a-z ]", "", instance.normalized),
    )
# Registry of per-class filters: each semiotic class in the dataset is paired
# with the filter (keep/drop) and process (clean-up) callbacks defined above.
filters = []
filters.append(
    Filter(class_type="CARDINAL", process_func=process_cardinal_1, filter_func=filter_cardinal_1)
)
filters.append(
    Filter(class_type="ORDINAL", process_func=process_ordinal_1, filter_func=filter_ordinal_1)
)
filters.append(
    Filter(class_type="DECIMAL", process_func=process_decimal_1, filter_func=filter_decimal_1)
)
filters.append(
    Filter(class_type="MEASURE", process_func=process_measure_1, filter_func=filter_measure_1)
)
filters.append(Filter(class_type="MONEY", process_func=process_money_1, filter_func=filter_money_1))
filters.append(Filter(class_type="TIME", process_func=process_time_1, filter_func=filter_time_1))
filters.append(Filter(class_type="DATE", process_func=process_date_1, filter_func=filter_date_1))
filters.append(Filter(class_type="PLAIN", process_func=process_plain_1, filter_func=filter_plain_1))
filters.append(Filter(class_type="PUNCT", process_func=process_punct_1, filter_func=filter_punct_1))
filters.append(
    Filter(class_type="LETTERS", process_func=process_letters_1, filter_func=filter_letters_1)
)
filters.append(
    Filter(class_type="VERBATIM", process_func=process_verbatim_1, filter_func=filter_verbatim_1)
)
filters.append(Filter(class_type="DIGIT", process_func=process_digit_1, filter_func=filter_digit_1))
filters.append(
    Filter(class_type="TELEPHONE", process_func=process_telephone_1, filter_func=filter_telephone_1)
)
filters.append(
    Filter(
        class_type="ELECTRONIC", process_func=process_electronic_1, filter_func=filter_electronic_1
    )
)
filters.append(
    Filter(class_type="FRACTION", process_func=process_fraction_1, filter_func=filter_fraction_1)
)
filters.append(
    Filter(class_type="ADDRESS", process_func=process_address_1, filter_func=filter_address_1)
)
# End-of-sentence markers are always kept and passed through untouched.
filters.append(Filter(class_type=EOS_TYPE, process_func=lambda x: x, filter_func=lambda x: True))
def filter_loaded_data(data: List[Instance], verbose: bool = False) -> List[Instance]:
    """
    Filters list of instances

    Args:
        data: list of instances
        verbose: when True, print every kept instance

    Returns: filtered and transformed list of instances
    """
    kept = []
    for instance in data:
        matched = False
        # Only the filter whose class_type matches can transform the instance.
        for fil in filters:
            if fil.class_type == instance.token_type and fil.filter(instance):
                instance = fil.process(instance)
                matched = True
        if not matched:
            continue
        if verbose:
            print(instance)
        kept.append(instance)
    return kept
def parse_args():
    """Parse command-line arguments for the evaluation script."""
    parser = ArgumentParser()
    parser.add_argument(
        "--input",
        type=str,
        default="./id_with_types/output-00001-of-00100",
        help="input file path",
    )
    parser.add_argument("--verbose", action="store_true", help="print filtered instances")
    return parser.parse_args()
if __name__ == "__main__":
    # Load the raw training data, clean it per semiotic class, then convert
    # the instances back into sentence-level training data.
    args = parse_args()
    file_path = args.input
    print("Loading training data: " + file_path)
    instance_list = load_files([file_path])  # List of instances
    filtered_instance_list = filter_loaded_data(instance_list, args.verbose)
    training_data_to_sentences(filtered_instance_list)
$ dolar
$ dolar Amerika
$ dolar amerika serikat
Rp rupiah
IDR rupiah
£ pound inggris
€ euro
₩ won
nzd dolar selandia baru
rs rupee
chf swiss franc
dkk kroner Denmark
fim markka finlandia
aed dirham emirat arab
¥ yen
czk koruna ceko
mro ouguiya mauritania
pkr rupee pakistan
crc kolon kosta rika
hk$ dolar Hongkong
npr rupee nepal
awg florin aruba
nok kroner norwegia
tzs shilling tanzan
sek kronor Swedia
cyp pound siprus
r nyata
sar riyal saudi
cve escudo tanjung verde
rsd dinar serbia
dm tanda jerman
shp santo helena pound
php peso filipina
cad dolar Kanada
ssp pound sunda selatan
scr rupiah seychelles
mvr rufiyaa maldivian
g mail gmail
gmail
n vidia nvidia
nvidia
outlook
hotmail
yahoo
aol
gmx
msn
live
yandex
orange
wanadoo
web
comcast
google
. dot
- berlari
- tanda penghubung
_ menggarisbawahi
! tanda seru
$ tanda dollar
& simbol untuk 'dan
' mengutip
* asterisk
+ plus
/ memotong
? tanda tanya
^ sirkomfleks
` kutipan tunggal yang tepat
{ penyangga kiri
| batang vertikal
} penjepit kanan
~ pasang surut
, koma
% persen
# tanda pagar
= sama dengan
@ at
_ garis bawah
~ tilde
≥ lebih besar dari atau sama dengan
≤ lebih kecil dari atau sama dengan
≠ tidak sama dengan
≈ mendekati sama dengan
± kurang lebih
× kali
A A
B B
C C
D D
E E
F F
G G
H H
I I
J J
K K
L L
M M
N N
O O
P P
Q Q
R R
S S
T T
U U
V V
W W
X X
Y Y
Z Z
a A
b B
c C
d D
e E
f F
g G
h H
i I
j J
k K
l L
m M
n N
o O
p P
q Q
r R
s S
t T
u U
v V
w W
x X
y Y
z Z
Α alfa
Β beta
Γ gamma
Δ delta
Ε epsilon
Ζ zeta
Θ theta
Ι iota
Κ kappa
Λ lambda
Μ mu
Ν nu
Ξ ksi
Ο omikron
Π pi
Ρ rho
Σ sigma
Τ tau
Υ upsilon
Φ phi
Χ khi
Ψ psi
Ω omega
α alfa
β beta
γ gamma
δ delta
ε epsilon
ζ zeta
η eta
θ theta
ι iota
κ kappa
λ lambda
μ mu
ν nu
ξ ksi
ο omikron
π pi
ρ rho
σ sigma
τ tau
υ upsilon
φ phi
χ khi
ψ psi
ω omega
\ No newline at end of file
k ribu
m juta
b milyar
t triliun
\ No newline at end of file
fahrenheit f
celsius c
kilometer km
meter m
sentimeter cm
milimeter mm
hektar ha
mil mi
meter persegi m²
kilometer persegi km²
kaki ft
persen %
hertz hz
kilowat kw
daya kuda hp
miligram mg
kilogram kg
gigahertz ghz
kilohertz khz
megahertz mhz
volt v
jam h
mega coulomb mc
detik s
nanometer nm
revolusi per menit rpm
menit min
mili ampere mA
persen %
kilo watt jam kwh
meter kubik m³
mil per jam mph
tera watt tw
mili volt mv
megawatt mw
mikrometer μm
terabyte tb
c c cc
gram g
dalton da
suasana atm
ohm ω
desibel db
peta kedua ps
ons oz
hekto liter hl
mikrogram μg
petagram pg
gigabyte gb
kilobit kb
elektron volt ev
megabita mb
kilobyte kb
kilobit per detik kbps
megabit per detik mbps
batu st
kilo liter kl
tera joule tj
kilo volt kv
mega volt mv
kilonewton kn
megameter mm
satuan astronomi au
halaman yd
radian rad
lumen lm
hekto detik hs
mol mol
giga pascal gpa
mililiter ml
gigawatt gw
mega ampere ma
simpul kt
kekuatan kilogram kgf
nano gram ng
nanodetik ns
mega siemens ms
batang bar
giga liter gl
mikrodetik μs
desi ampere da
pascal pa
desi detik ds
mili detik ms
meteran desi dm
kubik desi meter dm³
satuan massa atom amu
megabita mb
mega farad mf
becquerel bq
petabit pb
milimeter persegi mm²
sentimeter persegi cm²
mil persegi sq mi
kaki persegi sq ft
kilopascal kpa
candela cd
tera liter tl
mega detik ms
megapascal mpa
meteran peta pm
peta byte pb
giga watt jam gwh
kilo kalori kcal
abu-abu gy
saringan sv
kelas seratus cwt
c c cc
kaki persegi sq ft
inci persegi sq in
kaki persegi sqft
kaki persegi SqFt
milidetik msec
kilowatt jam kw·h
miliampere-jam mA⋅h
kilokalori kcal
kilokalori kCal
kilokalori Kcal
milimeter merkuri mmhg
milimeter merkuri mmHg
milimeter persegi mm2
milimeter kubik mm3
sentimeter persegi cm2
sentimeter kubik cm3
kilometer persegi km2
kilometer kubik km3
desimeter persegi dm2
desimeter kubik dm3
detik sec
jam hrs
mil per jam mph
kiloherts khz
kiloherts kHz
megahertz mhz
megahertz mHz
gigahertz ghz
gigahertz gHz
kilowatt jam kwh
mol mol
sendok teh tsp
kilopascal kPa
volume vol
volume Vol
rotasi per menit rpm
detak per menit bpm
galon gal
pascal pa
megapascal mpa
miliampere-jam mah
menit min
detik SEC
kalori cal
kilokalori Cal
meter persegi m2
meter kubik m3
milimeter mm
sentimeter cm
sentimeter CM
kilometer km
kilometer KM
miligram mg
kilogram kg
kilogram KG
jam hr
hertz hz
hertz Hz
mililiter ml
mililiter mL
miliampere mA
sentimeter kubik cc
piksel px
piksel PX
volt v
volt V
kilovolt kv
kilovolt kV
hektar ha
ekar ac
karat ct
inci in
kaki ft
yard yd
nanometer nm
desimeter dm
kilobyte kb
megabyte mb
gigabyte gb
terabyte tb
kilobyte KB
megabyte MB
gigabyte GB
terabyte TB
daya kuda hp
desibel db
desibel dB
kilojoule kj
kilojoule kJ
ons oz
kilowatt kw
kilowatt KW
joule j
joule J
gram g
gram G
liter l
liter L
meter m
ohm-meter Ω·m
mikrometer μm
mikrogram μg
mikroampere μA
derajat Celsius °c
derajat Celsius ˚c
derajat Fahrenheit °f
derajat Fahrenheit ˚f
milimeter persegi mm²
milimeter kubik mm³
sentimeter persegi cm²
sentimeter kubik cm³
kilometer persegi km²
kilometer kubik km³
desimeter kubik dm³
meter persegi m²
meter kubik m³
derajat °
derajat ˚
derajat Celsius ℃
derajat Fahrenheit ℉
ohm Ω
mikrometer μm
milimeter persegi mm2
milimeter kubik mm3
milimeter mm
sentimeter persegi cm2
sentimeter kubik cm3
sentimeter cm
sentimeter CM
kilometer persegi km2
kilometer kubik km3
kilometer km
kilometer KM
desimeter persegi dm2
desimeter kubik dm3
meter persegi m2
meter kubik m3
kaki persegi sq ft
kaki persegi sq. ft
kaki persegi sq.ft
kaki persegi sqft
kaki persegi SqFt
inci persegi sq in
mikrogram μg
miligram mg
kilogram kg
kilogram KG
milidetik msec
detik sec
jam hr
jam hrs
meter per detik m/s
kilometer per jam km/h
mil per jam mph
bit per detik bit/s
bit per detik Bit/s
byte per detik byte/s
byte per detik Byte/s
derajat Celsius °c
derajat Celsius ˚c
derajat Fahrenheit °f
derajat Fahrenheit ˚f
kilokalori kcal
kilokalori kCal
kilokalori Kcal
ons cairan fl.oz
farad per meter F/m
gram per liter g/l
gram per mililiter g/mL
hertz hz
hertz Hz
kiloherts khz
kiloherts kHz
megahertz mhz
megahertz mHz
gigahertz ghz
gigahertz gHz
kilometer per jam km/h
kilowatt per jam kw/h
kilowatt jam kw·h
kilowatt jam kW·h
kilowatt jam kwh
kilowatt jam kWh
mililiter ml
mililiter mL
miligram per mililiter mg/ml
miligram per mililiter mg/mL
miligram per liter mg/l
miligram per liter mg/L
miliampere mA
miliampere-jam mA⋅h
mol mol
ohm-meter Ω·m
siemens per meter S/m
sendok teh tsp
mikroampere μA
kilopascal kPa
milimeter merkuri mmhg
milimeter merkuri mmHg
volume vol
volume Vol
sentimeter kubik cc
rotasi per menit rpm
rotasi per menit r/min
detak per menit bpm
piksel px
piksel PX
volt v
volt V
kilovolt kv
kilovolt kV
hektar ha
ekar ac
karat ct
liter l
liter L
galon gal
mol mol
pascal pa
megapascal mpa
miliampere ma
miliampere-jam mah
inci in
kaki ft
yard yd
nanometer nm
meter m
desimeter dm
gram g
gram G
kilobyte kb
megabyte mb
gigabyte gb
terabyte tb
kilobyte KB
megabyte MB
gigabyte GB
terabyte TB
daya kuda hp
desibel db
desibel dB
joule j
joule J
kilojoule kj
kilojoule kJ
ons oz
kilowatt kw
kilowatt KW
menit min
detik SEC
kalori cal
kilokalori Cal
inci "
\ No newline at end of file
Januari
Februari
Maret
April
Mei
Juni
Juli
Agustus
September
Oktober
November
Desember
\ No newline at end of file
satu 1
dua 2
tiga 3
empat 4
lima 5
enam 6
tujuh 7
delapan 8
sembilan 9
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment