require "json"
require "compress/gzip"
# Compress
def zip(s : String) : String
return String.build do |io|
Compress::Gzip::Writer.open(io) do |gzip|
gzip << s
end
end
end
# Decompress
def unzip(s : String) : String
return Compress::Gzip::Reader.open(IO::Memory.new (s)) do |gzip|
gzip.gets_to_end
end
end
# Create a Crystal hash
person = {"name" => "John", "age" => 30, "city" => "New York"}
# Convert the hash to a JSON string
compressed_json_string = zip(person.to_json)
d = JSON.parse(unzip(compressed_json_string))
File.write("archive.zip", compressed_json_string)
data = File.read("archive.zip")
d = JSON.parse(unzip(data))
p d["name"]
Check out msgpack.
Gzip compression for payloads this small carries a lot of fixed overhead with little net gain unless your hashes are huge.
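For a rough sense of scale, here is a minimal sketch comparing the encodings of the example hash, reusing the zip helper from above. It assumes the crystal-community/msgpack-crystal shard is in your shard.yml, and the byte counts in the comments are approximate:

require "json"
require "msgpack" # assumed: the crystal-community/msgpack-crystal shard

person = {"name" => "John", "age" => 30, "city" => "New York"}
json = person.to_json

p json.bytesize          # 42 bytes of plain JSON
p zip(json).bytesize     # ~60+ bytes: gzip's header/trailer and deflate
                         # bookkeeping outweigh any savings at this size
p person.to_msgpack.size # ~30 bytes: a more compact encoding, with no
                         # compression step at all

MessagePack wins here because it replaces JSON's punctuation with compact binary tags instead of hunting for redundancy that a tiny hash simply doesn't have.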
Just experimenting, but thanks nevertheless. I appreciate your help!
I actually do something similar in production for Total Real Returns, using Compress::Gzip::Reader and Writer. That site does have large quantities of data that compress decently well, like the JSON and CSV tables of daily stock prices I get from upstream data providers, plus the transformations I apply to adjust for inflation/dividends/splits. I'm sure there are better ways to compress the tables, but this one sure is easy and performant.
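At those data sizes the streaming form of the same API is worth knowing, since it never holds the whole table in one String. A minimal sketch, with made-up file names and sample rows standing in for the real price tables:

require "compress/gzip"

# Made-up sample rows standing in for a large daily-price table
rows = [
  ["2024-01-02", "187.15"],
  ["2024-01-03", "184.25"],
]

# Stream-compress straight to disk, row by row
File.open("prices.csv.gz", "w") do |file|
  Compress::Gzip::Writer.open(file) do |gzip|
    gzip << "date,close\n"
    rows.each { |row| gzip << row.join(',') << '\n' }
  end
end

# Stream-decompress and process one line at a time
File.open("prices.csv.gz") do |file|
  Compress::Gzip::Reader.open(file) do |gzip|
    gzip.each_line do |line|
      # handle one CSV row here
    end
  end
end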