virtualx-engine/core/compressed_translation.cpp

/*************************************************************************/
/* compressed_translation.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "compressed_translation.h"
#include "pair.h"
extern "C" {
#include "thirdparty/misc/smaz.h"
2014-02-10 02:10:30 +01:00
}
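
// Scratch entry used while generating the table: the original (uncompressed) length of a
// message, its smaz-compressed bytes, and the offset of those bytes inside the packed
// "strings" pool.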
struct _PHashTranslationCmp {

	int orig_len;
	CharString compressed;
	int offset;
};
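
// Builds the compressed translation from a regular Translation resource.
// The output is a two-level perfect hash: "hash_table" maps the first-level hash of a key
// to a record in "bucket_table" (bucket size, second-level hash seed, then one
// {key hash, offset, compressed size, original size} tuple per message), while "strings"
// holds the smaz-compressed message data. The body is compiled only in tools (editor) builds.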
void PHashTranslation::generate(const Ref<Translation> &p_from) {

#ifdef TOOLS_ENABLED
	List<StringName> keys;
	p_from->get_message_list(&keys);

	int size = Math::larger_prime(keys.size());

	print_line("compressing keys: " + itos(keys.size()));

	Vector<Vector<Pair<int, CharString> > > buckets;
	Vector<Map<uint32_t, int> > table;
	Vector<uint32_t> hfunc_table;
	Vector<_PHashTranslationCmp> compressed;

	table.resize(size);
	hfunc_table.resize(size);
	buckets.resize(size);
	compressed.resize(keys.size());

	int idx = 0;
	int total_compression_size = 0;
	int total_string_size = 0;
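
	// First pass: hash every key into its bucket and smaz-compress the corresponding
	// message (falling back to the raw bytes when compression would not shrink it).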
	for (List<StringName>::Element *E = keys.front(); E; E = E->next()) {

		//hash string
		CharString cs = E->get().operator String().utf8();
		uint32_t h = hash(0, cs.get_data());
		Pair<int, CharString> p;
		p.first = idx;
		p.second = cs;

		buckets[h % size].push_back(p);

		//compress string
		CharString src_s = p_from->get_message(E->get()).operator String().utf8();
		_PHashTranslationCmp ps;
		ps.orig_len = src_s.size();
		ps.offset = total_compression_size;

		if (ps.orig_len != 0) {

			CharString dst_s;
			dst_s.resize(src_s.size());
			int ret = smaz_compress(src_s.get_data(), src_s.size(), &dst_s[0], src_s.size());
			if (ret >= src_s.size()) {

				//if compressed is larger than original, just use original
				ps.orig_len = src_s.size();
				ps.compressed = src_s;

			} else {

				dst_s.resize(ret);
				//ps.orig_len=;
				ps.compressed = dst_s;
			}
		} else {

			ps.orig_len = 1;
			ps.compressed.resize(1);
			ps.compressed[0] = 0;
		}

		compressed[idx] = ps;
		total_compression_size += ps.compressed.size();
		total_string_size += src_s.size();
		idx++;
	}

	int bucket_table_size = 0;

	print_line("total compressed string size: " + itos(total_compression_size) + " (" + itos(total_string_size) + " uncompressed).");
	for (int i = 0; i < size; i++) {

		Vector<Pair<int, CharString> > &b = buckets[i];
		Map<uint32_t, int> &t = table[i];

		if (b.size() == 0)
			continue;

		//print_line("bucket: "+itos(i)+" - elements: "+itos(b.size()));

		int d = 1;
		int item = 0;

		while (item < b.size()) {

			uint32_t slot = hash(d, b[item].second.get_data());

			if (t.has(slot)) {

				item = 0;
				d++;
				t.clear();
			} else {

				t[slot] = b[item].first;
				item++;
			}
		}

		hfunc_table[i] = d;
		bucket_table_size += 2 + b.size() * 4;
	}
print_line("bucket table size: " + itos(bucket_table_size * 4));
print_line("hash table size: " + itos(size * 4));
	hash_table.resize(size);
	bucket_table.resize(bucket_table_size);

	PoolVector<int>::Write htwb = hash_table.write();
	PoolVector<int>::Write btwb = bucket_table.write();

	uint32_t *htw = (uint32_t *)&htwb[0];
	uint32_t *btw = (uint32_t *)&btwb[0];

	int btindex = 0;
	int collisions = 0;

	for (int i = 0; i < size; i++) {

		Map<uint32_t, int> &t = table[i];
		if (t.size() == 0) {
			htw[i] = 0xFFFFFFFF; //nothing
			continue;
		} else if (t.size() > 1) {
			collisions += t.size() - 1;
		}

		htw[i] = btindex;
		btw[btindex++] = t.size();
		btw[btindex++] = hfunc_table[i];

		for (Map<uint32_t, int>::Element *E = t.front(); E; E = E->next()) {

			btw[btindex++] = E->key();
			btw[btindex++] = compressed[E->get()].offset;
			btw[btindex++] = compressed[E->get()].compressed.size();
			btw[btindex++] = compressed[E->get()].orig_len;
		}
	}

	print_line("total collisions: " + itos(collisions));

	strings.resize(total_compression_size);
	PoolVector<uint8_t>::Write cw = strings.write();

	for (int i = 0; i < compressed.size(); i++) {
		memcpy(&cw[compressed[i].offset], compressed[i].compressed.get_data(), compressed[i].compressed.size());
	}

	ERR_FAIL_COND(btindex != bucket_table_size);

	set_locale(p_from->get_locale());

#endif
}
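
// The three packed arrays are exposed as properties so they are saved and loaded with the
// resource; assigning a Translation to the editor-facing "load_from" property regenerates
// them via generate().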
bool PHashTranslation::_set(const StringName &p_name, const Variant &p_value) {

	String name = p_name.operator String();
	if (name == "hash_table") {
		hash_table = p_value;
		//print_line("translation: loaded hash table of size: "+itos(hash_table.size()));
	} else if (name == "bucket_table") {
		bucket_table = p_value;
		//print_line("translation: loaded bucket table of size: "+itos(bucket_table.size()));
	} else if (name == "strings") {
		strings = p_value;
		//print_line("translation: loaded string table of size: "+itos(strings.size()));
	} else if (name == "load_from") {
		//print_line("generating");
		generate(p_value);
	} else
		return false;

	return true;
}

bool PHashTranslation::_get(const StringName &p_name, Variant &r_ret) const {

	String name = p_name.operator String();
	if (name == "hash_table")
		r_ret = hash_table;
	else if (name == "bucket_table")
		r_ret = bucket_table;
	else if (name == "strings")
		r_ret = strings;
	else
		return false;

	return true;
}
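
// Lookup: hash the key to find its bucket, re-hash with that bucket's seed to find the
// exact entry, then return the message, decompressing it with smaz unless it was stored
// uncompressed (compressed size equal to original size).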
StringName PHashTranslation::get_message(const StringName &p_src_text) const {

	int htsize = hash_table.size();

	if (htsize == 0)
		return StringName();

	CharString str = p_src_text.operator String().utf8();
	uint32_t h = hash(0, str.get_data());

	PoolVector<int>::Read htr = hash_table.read();
	const uint32_t *htptr = (const uint32_t *)&htr[0];
	PoolVector<int>::Read btr = bucket_table.read();
	const uint32_t *btptr = (const uint32_t *)&btr[0];
	PoolVector<uint8_t>::Read sr = strings.read();
	const char *sptr = (const char *)&sr[0];

	uint32_t p = htptr[h % htsize];

	//print_line("String: "+p_src_text.operator String());
	//print_line("Hash: "+itos(p));

	if (p == 0xFFFFFFFF) {
		//print_line("GETMSG: Nothing!");
		return StringName(); //nothing
	}

	const Bucket &bucket = *(const Bucket *)&btptr[p];

	h = hash(bucket.func, str.get_data());

	int idx = -1;

	for (int i = 0; i < bucket.size; i++) {

		if (bucket.elem[i].key == h) {

			idx = i;
			break;
		}
	}

	//print_line("bucket pos: "+itos(idx));

	if (idx == -1) {
		//print_line("GETMSG: Not in Bucket!");
		return StringName();
	}

	if (bucket.elem[idx].comp_size == bucket.elem[idx].uncomp_size) {

		String rstr;
		rstr.parse_utf8(&sptr[bucket.elem[idx].str_offset], bucket.elem[idx].uncomp_size);
		//print_line("Uncompressed, size: "+itos(bucket.elem[idx].comp_size));
		//print_line("Return: "+rstr);
		return rstr;
	} else {

		CharString uncomp;
		uncomp.resize(bucket.elem[idx].uncomp_size + 1);
		smaz_decompress(&sptr[bucket.elem[idx].str_offset], bucket.elem[idx].comp_size, uncomp.ptr(), bucket.elem[idx].uncomp_size);

		String rstr;
		rstr.parse_utf8(uncomp.get_data());
		//print_line("Compressed, size: "+itos(bucket.elem[idx].comp_size));
		//print_line("Return: "+rstr);
		return rstr;
	}
}

void PHashTranslation::_get_property_list(List<PropertyInfo> *p_list) const {

	p_list->push_back(PropertyInfo(Variant::POOL_INT_ARRAY, "hash_table"));
	p_list->push_back(PropertyInfo(Variant::POOL_INT_ARRAY, "bucket_table"));
	p_list->push_back(PropertyInfo(Variant::POOL_BYTE_ARRAY, "strings"));
	p_list->push_back(PropertyInfo(Variant::OBJECT, "load_from", PROPERTY_HINT_RESOURCE_TYPE, "Translation", PROPERTY_USAGE_EDITOR));
}

void PHashTranslation::_bind_methods() {

	ClassDB::bind_method(D_METHOD("generate", "from"), &PHashTranslation::generate);
}

PHashTranslation::PHashTranslation() {
}