diff options
Diffstat (limited to 'v_windows/v/old/vlib/x')
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/README.md | 175 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/any_test.v | 130 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/decoder.v | 200 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/decoder_test.v | 61 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/encoder.v | 179 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/encoder_test.v | 29 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/json2.v | 122 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/json2_test.v | 398 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/scanner.v | 306 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/json2/scanner_test.v | 351 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/README.md | 310 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/common.v | 205 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/render_bmp.v | 825 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/render_sokol_cpu.v | 210 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/text_block.v | 120 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/ttf.v | 1085 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/ttf_test.v | 237 | ||||
| -rw-r--r-- | v_windows/v/old/vlib/x/ttf/ttf_test_data.bin | bin | 0 -> 16124 bytes | 
18 files changed, 4943 insertions, 0 deletions
diff --git a/v_windows/v/old/vlib/x/json2/README.md b/v_windows/v/old/vlib/x/json2/README.md new file mode 100644 index 0000000..fcefbff --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/README.md @@ -0,0 +1,175 @@ +> The name `json2` was chosen to avoid any unwanted potential conflicts with the +> existing codegen tailored for the main `json` module which is powered by CJSON. + +`x.json2` is an experimental JSON parser written from scratch on V. + +## Usage +```v oksyntax +import x.json2 +import net.http + +fn main() { +	// Decoding +	resp := http.get('https://example.com') ? + +	// raw decode +	raw_person := json2.raw_decode(resp.text) ? + +	// Casting `Any` type / Navigating +	person := raw_person.as_map() +	name := person['name'].str() // Bob +	age := person['age'].int() // 19 +	pi := person['pi'].f64() // 3.14.... + +	// Constructing an `Any` type +	mut me := map[string]json2.Any{} +	me['name'] = 'Bob' +	me['age'] = 18 + +	mut arr := []json2.Any{} +	arr << 'rock' +	arr << 'papers' +	arr << json2.null +	arr << 12 + +	me['interests'] = arr + +	mut pets := map[string]json2.Any{} +	pets['Sam'] = 'Maltese Shitzu' +	me['pets'] = pets + +	// Stringify to JSON +	println(me.str()) +	//{ +	//   "name":"Bob", +	//   "age":18, +	//   "interests":["rock","papers","scissors",null,12], +	//   "pets":{"Sam":"Maltese"} +	//} + +	// Encode a struct/type to JSON +	encoded_json := json2.encode<Person>(person2) +} +``` +## Using `decode<T>` and `encode<T>` +> Codegen for this feature is still WIP. +> You need to manually define the methods before using the module to structs. + +In order to use the `decode<T>` and `encode<T>` function, you need to explicitly define +two methods: `from_json` and `to_json`. `from_json` accepts a `json2.Any` argument +and inside of it you need to map the fields you're going to put into the type. +As for `to_json` method, you just need to map the values into `json2.Any` +and turn it into a string. 
+ +```v ignore +struct Person { +mut: +    name string +    age  int = 20 +    pets []string +} + +fn (mut p Person) from_json(f json2.Any) { +    obj := f.as_map() +    for k, v in obj { +        match k { +            'name' { p.name = v.str() } +            'age' { p.age = v.int() } +            'pets' { p.pets = v.arr().map(it.str()) } +            else {} +        } +    } +} + +fn (p Person) to_json() string { +    mut obj := map[string]json2.Any +    obj['name'] = p.name +    obj['age'] = p.age +    obj['pets'] = p.pets +    return obj.str() +} + +fn main() { +    resp := os.read_file('./person.json')? +    person := json2.decode<Person>(resp)? +    println(person) // Person{name: 'Bob', age: 28, pets: ['Floof']} +    person_json := json2.encode<Person>(person) +    println(person_json) // {"name": "Bob", "age": 28, "pets": ["Floof"]} +} +``` + +## Using struct tags +`x.json2` can access and use the struct field tags similar to the +`json` module by using the comp-time `$for` for structs. + +```v ignore +fn (mut p Person) from_json(f json2.Any) { +    mp := an.as_map() +	mut js_field_name := '' +    $for field in Person.fields { +        js_field_name = field.name + +        for attr in field.attrs { +			if attr.starts_with('json:') { +				js_field_name = attr.all_after('json:').trim_left(' ') +				break +			} +		} + +        match field.name { +            'name' { p.name = mp[js_field_name].str() } +			'age' { u.age = mp[js_field_name].int() } +			'pets' { u.pets = mp[js_field_name].arr().map(it.str()) } +			else {} +		} +    } +} +``` + +### Null Values +`x.json2` has a separate `null` type for differentiating an undefined value and a null value. +To verify that the field you're accessing is a `null`, use `<typ> is json2.Null`. 
+ +```v ignore +fn (mut p Person) from_json(f json2.Any) { +    obj := f.as_map() +    if obj['age'] is json2.Null { +        // use a default value +        p.age = 10 +    } +} +``` + +### Custom field names +Aside from using struct tags, you can also just simply cast the base field into a map (`as_map()`) +and access the field you wish to put into the struct/type. + +```v ignore +fn (mut p Person) from_json(f json2.Any) { +    obj := f.as_map() +    p.name = obj['nickname'].str() +} +``` + +```v oksyntax +fn (mut p Person) to_json() string { +	obj := f.as_map() +	obj['nickname'] = p.name +	return obj.str() +} +``` + +### Undefined Values +Getting undefined values has the same behavior as regular V types. +If you're casting a base field into `map[string]json2.Any` and fetch an undefined entry/value, +it simply returns empty. As for the `[]json2.Any`, it returns an index error. + +## Casting a value to an incompatible type +`x.json2` provides methods for turning `Any` types into usable types. +The following list shows the possible outputs when casting a value to an incompatible type. + +1. Casting non-array values as array (`arr()`) will return an array with the value as the content. +2. Casting non-map values as map (`as_map()`) will return a map with the value as the content. +3. Casting non-string values to string (`str()`) will return the +JSON string representation of the value. +4. Casting non-numeric values to int/float (`int()`/`i64()`/`f32()`/`f64()`) will return zero. 
diff --git a/v_windows/v/old/vlib/x/json2/any_test.v b/v_windows/v/old/vlib/x/json2/any_test.v new file mode 100644 index 0000000..6f86900 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/any_test.v @@ -0,0 +1,130 @@ +import x.json2 + +const ( +	sample_data = map{ +		'int':  json2.Any(int(1)) +		'i64':  json2.Any(i64(128)) +		'f32':  json2.Any(f32(2.0)) +		'f64':  json2.Any(f64(1.283)) +		'bool': json2.Any(false) +		'str':  json2.Any('test') +		'null': json2.Any(json2.null) +		'arr':  json2.Any([json2.Any('lol')]) +		'obj':  json2.Any(map{ +			'foo': json2.Any(10) +		}) +	} +) + +fn is_null(f json2.Any) bool { +	match f { +		json2.Null { return true } +		else { return false } +	} +} + +fn test_f32() { +	// valid conversions +	assert sample_data['int'].f32() == 1.0 +	assert sample_data['i64'].f32() == 128.0 +	assert sample_data['f32'].f32() == 2.0 +	assert sample_data['f64'].f32() == 1.2829999923706055 +	// invalid conversions +	assert sample_data['bool'].f32() == 0.0 +	assert sample_data['str'].f32() == 0.0 +	assert sample_data['null'].f32() == 0.0 +	assert sample_data['arr'].f32() == 0.0 +	assert sample_data['obj'].f32() == 0.0 +} + +fn test_f64() { +	// valid conversions +	assert sample_data['int'].f64() == 1.0 +	assert sample_data['i64'].f64() == 128.0 +	assert sample_data['f32'].f64() == 2.0 +	assert sample_data['f64'].f64() == 1.283 +	// invalid conversions +	assert sample_data['bool'].f64() == 0.0 +	assert sample_data['str'].f64() == 0.0 +	assert sample_data['null'].f64() == 0.0 +	assert sample_data['arr'].f64() == 0.0 +	assert sample_data['obj'].f64() == 0.0 +} + +fn test_int() { +	// valid conversions +	assert sample_data['int'].int() == 1 +	assert sample_data['i64'].int() == 128 +	assert sample_data['f32'].int() == 2 +	assert sample_data['f64'].int() == 1 +	assert json2.Any(true).int() == 1 +	// invalid conversions +	assert json2.Any('123').int() == 0 +	assert sample_data['null'].int() == 0 +	assert sample_data['arr'].int() == 0 +	assert 
sample_data['obj'].int() == 0 +} + +fn test_i64() { +	// valid conversions +	assert sample_data['int'].i64() == 1 +	assert sample_data['i64'].i64() == 128 +	assert sample_data['f32'].i64() == 2 +	assert sample_data['f64'].i64() == 1 +	assert json2.Any(true).i64() == 1 +	// invalid conversions +	assert json2.Any('123').i64() == 0 +	assert sample_data['null'].i64() == 0 +	assert sample_data['arr'].i64() == 0 +	assert sample_data['obj'].i64() == 0 +} + +fn test_as_map() { +	assert sample_data['int'].as_map()['0'].int() == 1 +	assert sample_data['i64'].as_map()['0'].i64() == 128.0 +	assert sample_data['f32'].as_map()['0'].f32() == 2.0 +	assert sample_data['f64'].as_map()['0'].f64() == 1.283 +	assert sample_data['bool'].as_map()['0'].bool() == false +	assert sample_data['str'].as_map()['0'].str() == 'test' +	assert is_null(sample_data['null'].as_map()['0']) == true +	assert sample_data['arr'].as_map()['0'].str() == 'lol' +	assert sample_data['obj'].as_map()['foo'].int() == 10 +} + +fn test_arr() { +	assert sample_data['int'].arr()[0].int() == 1 +	assert sample_data['i64'].arr()[0].i64() == 128.0 +	assert sample_data['f32'].arr()[0].f32() == 2.0 +	assert sample_data['f64'].arr()[0].f64() == 1.283 +	assert sample_data['bool'].arr()[0].bool() == false +	assert sample_data['str'].arr()[0].str() == 'test' +	assert is_null(sample_data['null'].arr()[0]) == true +	assert sample_data['arr'].arr()[0].str() == 'lol' +	assert sample_data['obj'].arr()[0].int() == 10 +} + +fn test_bool() { +	// valid conversions +	assert sample_data['bool'].bool() == false +	assert json2.Any('true').bool() == true +	// invalid conversions +	assert sample_data['int'].bool() == false +	assert sample_data['i64'].bool() == false +	assert sample_data['f32'].bool() == false +	assert sample_data['f64'].bool() == false +	assert sample_data['null'].bool() == false +	assert sample_data['arr'].bool() == false +	assert sample_data['obj'].bool() == false +} + +fn test_str() { +	assert sample_data['int'].str() == 
'1' +	assert sample_data['i64'].str() == '128' +	assert sample_data['f32'].str() == '2.0' +	assert sample_data['f64'].str() == '1.283' +	assert sample_data['bool'].str() == 'false' +	assert sample_data['str'].str() == 'test' +	assert sample_data['null'].str() == 'null' +	assert sample_data['arr'].str() == '["lol"]' +	assert sample_data.str() == '{"int":1,"i64":128,"f32":2.0,"f64":1.283,"bool":false,"str":"test","null":null,"arr":["lol"],"obj":{"foo":10}}' +} diff --git a/v_windows/v/old/vlib/x/json2/decoder.v b/v_windows/v/old/vlib/x/json2/decoder.v new file mode 100644 index 0000000..a45a091 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/decoder.v @@ -0,0 +1,200 @@ +// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. +module json2 + +// `Any` is a sum type that lists the possible types to be decoded and used. +pub type Any = Null | []Any | bool | f32 | f64 | i64 | int | map[string]Any | string | +	u64 + +// `Null` struct is a simple representation of the `null` value in JSON. +pub struct Null { +	is_null bool = true +} + +struct Parser { +mut: +	scanner      &Scanner +	p_tok        Token +	tok          Token +	n_tok        Token +	n_level      int +	convert_type bool = true +} + +struct InvalidTokenError { +	msg  string +	code int +} + +struct UnknownTokenError { +	msg  string +	code int +} + +fn (mut p Parser) next() { +	p.p_tok = p.tok +	p.tok = p.n_tok +	p.n_tok = p.scanner.scan() +} + +fn (mut p Parser) next_with_err() ? { +	p.next() +	if p.tok.kind == .error { +		return error(p.emit_error(p.tok.lit.bytestr())) +	} +} + +fn (p Parser) emit_error(msg string) string { +	line := p.tok.line +	column := p.tok.col + p.tok.lit.len +	return '[x.json2] $msg ($line:$column)' +} + +// TODO: copied from v.util to avoid the entire module and its functions +// from being imported. remove later once -skip-unused is enabled by default. 
+fn skip_bom(file_content string) string { +	mut raw_text := file_content +	// BOM check +	if raw_text.len >= 3 { +		unsafe { +			c_text := raw_text.str +			if c_text[0] == 0xEF && c_text[1] == 0xBB && c_text[2] == 0xBF { +				// skip three BOM bytes +				offset_from_begin := 3 +				raw_text = tos(c_text[offset_from_begin], vstrlen(c_text) - offset_from_begin) +			} +		} +	} +	return raw_text +} + +fn new_parser(srce string, convert_type bool) Parser { +	src := skip_bom(srce) +	return Parser{ +		scanner: &Scanner{ +			text: src.bytes() +		} +		convert_type: convert_type +	} +} + +fn (mut p Parser) decode() ?Any { +	p.next() +	p.next_with_err() ? +	fi := p.decode_value() ? +	if p.tok.kind != .eof { +		return IError(&InvalidTokenError{ +			msg: p.emit_error('invalid token `$p.tok.kind`') +		}) +	} +	return fi +} + +fn (mut p Parser) decode_value() ?Any { +	if p.n_level + 1 == 500 { +		return error(p.emit_error('reached maximum nesting level of 500')) +	} +	match p.tok.kind { +		.lsbr { +			return p.decode_array() +		} +		.lcbr { +			return p.decode_object() +		} +		.int_, .float { +			tl := p.tok.lit.bytestr() +			kind := p.tok.kind +			p.next_with_err() ? +			if p.convert_type { +				if kind == .float { +					return Any(tl.f64()) +				} +				return Any(tl.i64()) +			} +			return Any(tl) +		} +		.bool_ { +			lit := p.tok.lit.bytestr() +			p.next_with_err() ? +			if p.convert_type { +				return Any(lit.bool()) +			} +			return Any(lit) +		} +		.null { +			p.next_with_err() ? +			if p.convert_type { +				return Any(null) +			} +			return Any('null') +		} +		.str_ { +			str := p.tok.lit.bytestr() +			p.next_with_err() ? +			return Any(str) +		} +		else { +			return IError(&InvalidTokenError{ +				msg: p.emit_error('invalid token `$p.tok.kind`') +			}) +		} +	} +	return Any(null) +} + +fn (mut p Parser) decode_array() ?Any { +	mut items := []Any{} +	p.next_with_err() ? +	p.n_level++ +	for p.tok.kind != .rsbr { +		item := p.decode_value() ? 
+		items << item +		if p.tok.kind == .comma { +			p.next_with_err() ? +			if p.tok.kind == .rsbr || p.tok.kind == .rcbr { +				return IError(&InvalidTokenError{ +					msg: p.emit_error('invalid token `$p.tok.lit') +				}) +			} +		} else if p.tok.kind == .rsbr { +			break +		} else { +			return IError(&UnknownTokenError{ +				msg: p.emit_error("unknown token '$p.tok.lit' when decoding array.") +			}) +		} +	} +	p.next_with_err() ? +	p.n_level-- +	return Any(items) +} + +fn (mut p Parser) decode_object() ?Any { +	mut fields := map[string]Any{} +	p.next_with_err() ? +	p.n_level++ +	for p.tok.kind != .rcbr { +		is_key := p.tok.kind == .str_ && p.n_tok.kind == .colon +		if !is_key { +			return IError(&InvalidTokenError{ +				msg: p.emit_error('invalid token `$p.tok.kind`, expecting `str_`') +			}) +		} +		cur_key := p.tok.lit.bytestr() +		p.next_with_err() ? +		p.next_with_err() ? +		fields[cur_key] = p.decode_value() ? +		if p.tok.kind == .comma { +			p.next_with_err() ? +			if p.tok.kind != .str_ { +				return IError(&UnknownTokenError{ +					msg: p.emit_error("unknown token '$p.tok.lit' when decoding object.") +				}) +			} +		} +	} +	p.next_with_err() ? +	p.n_level-- +	return Any(fields) +} diff --git a/v_windows/v/old/vlib/x/json2/decoder_test.v b/v_windows/v/old/vlib/x/json2/decoder_test.v new file mode 100644 index 0000000..f80f8b2 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/decoder_test.v @@ -0,0 +1,61 @@ +module json2 + +fn test_raw_decode_string() ? { +	str := raw_decode('"Hello!"') ? +	assert str.str() == 'Hello!' +} + +fn test_raw_decode_number() ? { +	num := raw_decode('123') ? +	assert num.int() == 123 +} + +fn test_raw_decode_array() ? { +	raw_arr := raw_decode('["Foo", 1]') ? +	arr := raw_arr.arr() +	assert arr[0].str() == 'Foo' +	assert arr[1].int() == 1 +} + +fn test_raw_decode_bool() ? { +	bol := raw_decode('false') ? +	assert bol.bool() == false +} + +fn test_raw_decode_map() ? { +	raw_mp := raw_decode('{"name":"Bob","age":20}') ? 
+	mp := raw_mp.as_map() +	assert mp['name'].str() == 'Bob' +	assert mp['age'].int() == 20 +} + +fn test_raw_decode_null() ? { +	nul := raw_decode('null') ? +	assert nul is Null +} + +fn test_raw_decode_invalid() ? { +	raw_decode('1z') or { +		assert err.msg == '[x.json2] invalid token `z` (0:17)' +		return +	} +	assert false +} + +fn test_raw_decode_string_with_dollarsign() ? { +	str := raw_decode(r'"Hello $world"') ? +	assert str.str() == r'Hello $world' +} + +fn test_raw_decode_map_with_whitespaces() ? { +	raw_mp := raw_decode(' \n\t{"name":"Bob","age":20}\n\t') ? +	mp := raw_mp.as_map() +	assert mp['name'].str() == 'Bob' +	assert mp['age'].int() == 20 +} + +fn test_nested_array_object() ? { +	mut parser := new_parser(r'[[[[[],[],[]]]],{"Test":{}},[[]]]', false) +	decoded := parser.decode() ? +	assert parser.n_level == 0 +} diff --git a/v_windows/v/old/vlib/x/json2/encoder.v b/v_windows/v/old/vlib/x/json2/encoder.v new file mode 100644 index 0000000..b1ca0e4 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/encoder.v @@ -0,0 +1,179 @@ +// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. +module json2 + +import strings + +fn write_value(v Any, i int, len int, mut wr strings.Builder) { +	str := v.json_str() +	if v is string { +		wr.write_string('"$str"') +	} else { +		wr.write_string(str) +	} +	if i >= len - 1 { +		return +	} +	wr.write_b(`,`) +} + +// str returns the string representation of the `map[string]Any`. +pub fn (flds map[string]Any) str() string { +	mut wr := strings.new_builder(200) +	wr.write_b(`{`) +	mut i := 0 +	for k, v in flds { +		wr.write_string('"$k":') +		write_value(v, i, flds.len, mut wr) +		i++ +	} +	wr.write_b(`}`) +	defer { +		unsafe { wr.free() } +	} +	res := wr.str() +	return res +} + +// str returns the string representation of the `[]Any`. 
+pub fn (flds []Any) str() string { +	mut wr := strings.new_builder(200) +	wr.write_b(`[`) +	for i, v in flds { +		write_value(v, i, flds.len, mut wr) +	} +	wr.write_b(`]`) +	defer { +		unsafe { wr.free() } +	} +	res := wr.str() +	return res +} + +// str returns the string representation of the `Any` type. Use the `json_str` method +// if you want to use the escaped str() version of the `Any` type. +pub fn (f Any) str() string { +	if f is string { +		return f +	} else { +		return f.json_str() +	} +} + +// json_str returns the JSON string representation of the `Any` type. +pub fn (f Any) json_str() string { +	match f { +		string { +			return json_string(f) +		} +		int { +			return f.str() +		} +		u64, i64 { +			return f.str() +		} +		f32 { +			str_f32 := f.str() +			if str_f32.ends_with('.') { +				return '${str_f32}0' +			} +			return str_f32 +		} +		f64 { +			str_f64 := f.str() +			if str_f64.ends_with('.') { +				return '${str_f64}0' +			} +			return str_f64 +		} +		bool { +			return f.str() +		} +		map[string]Any { +			return f.str() +		} +		[]Any { +			return f.str() +		} +		Null { +			return 'null' +		} +	} +} + +// char_len_list is a modified version of builtin.utf8_str_len +// that returns an array of character lengths. (e.g "t✔" => [1,2]) +fn char_len_list(s string) []int { +	mut l := 1 +	mut ls := []int{} +	for i := 0; i < s.len; i++ { +		c := s[i] +		if (c & (1 << 7)) != 0 { +			for t := byte(1 << 6); (c & t) != 0; t >>= 1 { +				l++ +				i++ +			} +		} +		ls << l +		l = 1 +	} +	return ls +} + +const escaped_chars = [r'\b', r'\f', r'\n', r'\r', r'\t'] + +// json_string returns the JSON spec-compliant version of the string. 
+[manualfree] +fn json_string(s string) string { +	// not the best implementation but will revisit it soon +	char_lens := char_len_list(s) +	mut sb := strings.new_builder(s.len) +	mut i := 0 +	defer { +		unsafe { +			char_lens.free() +			// freeing string builder on defer after +			// returning .str() still isn't working :( +			// sb.free() +		} +	} +	for char_len in char_lens { +		if char_len == 1 { +			chr := s[i] +			if chr in important_escapable_chars { +				for j := 0; j < important_escapable_chars.len; j++ { +					if chr == important_escapable_chars[j] { +						sb.write_string(json2.escaped_chars[j]) +						break +					} +				} +			} else if chr == `"` || chr == `/` || chr == `\\` { +				sb.write_string('\\' + chr.ascii_str()) +			} else { +				sb.write_b(chr) +			} +		} else { +			slice := s[i..i + char_len] +			hex_code := slice.utf32_code().hex() +			if hex_code.len < 4 { +				// an utf8 codepoint +				sb.write_string(slice) +			} else if hex_code.len == 4 { +				sb.write_string('\\u$hex_code') +			} else { +				// TODO: still figuring out what +				// to do with more than 4 chars +				sb.write_b(` `) +			} +			unsafe { +				slice.free() +				hex_code.free() +			} +		} +		i += char_len +	} +	str := sb.str() +	unsafe { sb.free() } +	return str +} diff --git a/v_windows/v/old/vlib/x/json2/encoder_test.v b/v_windows/v/old/vlib/x/json2/encoder_test.v new file mode 100644 index 0000000..8135172 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/encoder_test.v @@ -0,0 +1,29 @@ +import x.json2 + +fn test_json_string_characters() { +	text := json2.raw_decode(r'"\n\r\b\f\t\\\"\/"') or { '' } +	assert text.json_str() == '\\n\\r\\b\\f\\t\\\\\\"\\/' +} + +fn test_json_string() { +	text := json2.Any('te✔st') +	assert text.json_str() == r'te\u2714st' +} + +fn test_json_string_emoji() { +	text := json2.Any('🐈') +	assert text.json_str() == r' ' +} + +fn test_json_string_non_ascii() { +	text := json2.Any('ひらがな') +	assert text.json_str() == r'\u3072\u3089\u304c\u306a' +} + 
+fn test_utf8_strings_are_not_modified() ? { +	original := '{"s":"Schilddrüsenerkrankungen"}' +	// dump(original) +	deresult := json2.raw_decode(original) ? +	// dump(deresult) +	assert deresult.str() == original +} diff --git a/v_windows/v/old/vlib/x/json2/json2.v b/v_windows/v/old/vlib/x/json2/json2.v new file mode 100644 index 0000000..0e5012c --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/json2.v @@ -0,0 +1,122 @@ +// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. +module json2 + +pub const ( +	null = Null{} +) + +pub interface Serializable { +	from_json(f Any) +	to_json() string +} + +// Decodes a JSON string into an `Any` type. Returns an option. +pub fn raw_decode(src string) ?Any { +	mut p := new_parser(src, true) +	return p.decode() +} + +// Same with `raw_decode`, but skips the type conversion for certain types when decoding a certain value. +pub fn fast_raw_decode(src string) ?Any { +	mut p := new_parser(src, false) +	return p.decode() +} + +// decode is a generic function that decodes a JSON string into the target type. +pub fn decode<T>(src string) ?T { +	res := raw_decode(src) ? +	mut typ := T{} +	typ.from_json(res) +	return typ +} + +// encode is a generic function that encodes a type into a JSON string. +pub fn encode<T>(typ T) string { +	return typ.to_json() +} + +// as_map uses `Any` as a map. +pub fn (f Any) as_map() map[string]Any { +	if f is map[string]Any { +		return f +	} else if f is []Any { +		mut mp := map[string]Any{} +		for i, fi in f { +			mp['$i'] = fi +		} +		return mp +	} +	return map{ +		'0': f +	} +} + +// int uses `Any` as an integer. +pub fn (f Any) int() int { +	match f { +		int { return f } +		i64, f32, f64, bool { return int(f) } +		else { return 0 } +	} +} + +// i64 uses `Any` as a 64-bit integer. 
+pub fn (f Any) i64() i64 { +	match f { +		i64 { return f } +		int, f32, f64, bool { return i64(f) } +		else { return 0 } +	} +} + +// u64 uses `Any` as a 64-bit unsigned integer. +pub fn (f Any) u64() u64 { +	match f { +		u64 { return f } +		int, i64, f32, f64, bool { return u64(f) } +		else { return 0 } +	} +} + +// f32 uses `Any` as a 32-bit float. +pub fn (f Any) f32() f32 { +	match f { +		f32 { return f } +		int, i64, f64 { return f32(f) } +		else { return 0.0 } +	} +} + +// f64 uses `Any` as a float. +pub fn (f Any) f64() f64 { +	match f { +		f64 { return f } +		int, i64, f32 { return f64(f) } +		else { return 0.0 } +	} +} + +// arr uses `Any` as an array. +pub fn (f Any) arr() []Any { +	if f is []Any { +		return f +	} else if f is map[string]Any { +		mut arr := []Any{} +		for _, v in f { +			arr << v +		} +		return arr +	} +	return [f] +} + +// bool uses `Any` as a bool +pub fn (f Any) bool() bool { +	match f { +		bool { return f } +		string { return f.bool() } +		else { return false } +	} +} diff --git a/v_windows/v/old/vlib/x/json2/json2_test.v b/v_windows/v/old/vlib/x/json2/json2_test.v new file mode 100644 index 0000000..b808f42 --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/json2_test.v @@ -0,0 +1,398 @@ +import x.json2 + +enum JobTitle { +	manager +	executive +	worker +} + +struct Employee { +pub mut: +	name   string +	age    int +	salary f32 +	title  JobTitle +} + +fn (e Employee) to_json() string { +	mut mp := map[string]json2.Any{} +	mp['name'] = e.name +	mp['age'] = e.age +	mp['salary'] = e.salary +	mp['title'] = int(e.title) +	/* +	$for field in Employee.fields { +		d := e.$(field.name) + +		$if field.typ is JobTitle { +			mp[field.name] = json.encode<int>(d) +		} $else { +			mp[field.name] = d +		} +	} +	*/ +	return mp.str() +} + +fn (mut e Employee) from_json(any json2.Any) { +	mp := any.as_map() +	e.name = mp['name'].str() +	e.age = mp['age'].int() +	e.salary = mp['salary'].f32() +	e.title = JobTitle(mp['title'].int()) +} + +fn test_simple() 
{ +	x := Employee{'Peter', 28, 95000.5, .worker} +	s := json2.encode<Employee>(x) +	eprintln('Employee x: $s') +	assert s == '{"name":"Peter","age":28,"salary":95000.5,"title":2}' +	y := json2.decode<Employee>(s) or { +		println(err) +		assert false +		return +	} +	eprintln('Employee y: $y') +	assert y.name == 'Peter' +	assert y.age == 28 +	assert y.salary == 95000.5 +	assert y.title == .worker +} + +fn test_fast_raw_decode() { +	s := '{"name":"Peter","age":28,"salary":95000.5,"title":2}' +	o := json2.fast_raw_decode(s) or { +		assert false +		json2.Any(json2.null) +	} +	str := o.str() +	assert str == '{"name":"Peter","age":"28","salary":"95000.5","title":"2"}' +} + +fn test_character_unescape() { +	message := r'{ +	"newline": "new\nline", +	"tab": "\ttab", +	"backslash": "back\\slash", +	"quotes": "\"quotes\"", +	"slash":"\/dev\/null" +}' +	mut obj := json2.raw_decode(message) or { +		println(err) +		assert false +		return +	} +	lines := obj.as_map() +	eprintln('$lines') +	assert lines['newline'].str() == 'new\nline' +	assert lines['tab'].str() == '\ttab' +	assert lines['backslash'].str() == 'back\\slash' +	assert lines['quotes'].str() == '"quotes"' +	assert lines['slash'].str() == '/dev/null' +} + +struct User2 { +pub mut: +	age  int +	nums []int +} + +fn (mut u User2) from_json(an json2.Any) { +	mp := an.as_map() +	mut js_field_name := '' +	$for field in User.fields { +		js_field_name = field.name +		for attr in field.attrs { +			if attr.starts_with('json:') { +				js_field_name = attr.all_after('json:').trim_left(' ') +				break +			} +		} +		match field.name { +			'age' { u.age = mp[js_field_name].int() } +			'nums' { u.nums = mp[js_field_name].arr().map(it.int()) } +			else {} +		} +	} +} + +// User struct needs to be `pub mut` for now in order to access and manipulate values +struct User { +pub mut: +	age           int +	nums          []int +	last_name     string [json: lastName] +	is_registered bool   [json: IsRegistered] +	typ           int    [json: 
'type'] +	pets          string [json: 'pet_animals'; raw] +} + +fn (mut u User) from_json(an json2.Any) { +	mp := an.as_map() +	mut js_field_name := '' +	$for field in User.fields { +		// FIXME: C error when initializing js_field_name inside comptime for +		js_field_name = field.name +		for attr in field.attrs { +			if attr.starts_with('json:') { +				js_field_name = attr.all_after('json:').trim_left(' ') +				break +			} +		} +		match field.name { +			'age' { u.age = mp[js_field_name].int() } +			'nums' { u.nums = mp[js_field_name].arr().map(it.int()) } +			'last_name' { u.last_name = mp[js_field_name].str() } +			'is_registered' { u.is_registered = mp[js_field_name].bool() } +			'typ' { u.typ = mp[js_field_name].int() } +			'pets' { u.pets = mp[js_field_name].str() } +			else {} +		} +	} +} + +fn (u User) to_json() string { +	// TODO: derive from field +	mut mp := map{ +		'age': json2.Any(u.age) +	} +	mp['nums'] = u.nums.map(json2.Any(it)) +	mp['lastName'] = u.last_name +	mp['IsRegistered'] = u.is_registered +	mp['type'] = u.typ +	mp['pet_animals'] = u.pets +	return mp.str() +} + +fn test_parse_user() { +	s := '{"age": 10, "nums": [1,2,3], "type": 1, "lastName": "Johnson", "IsRegistered": true, "pet_animals": {"name": "Bob", "animal": "Dog"}}' +	u2 := json2.decode<User2>(s) or { +		println(err) +		assert false +		return +	} +	println(u2) +	u := json2.decode<User>(s) or { +		println(err) +		assert false +		return +	} +	assert u.age == 10 +	assert u.last_name == 'Johnson' +	assert u.is_registered == true +	assert u.nums.len == 3 +	assert u.nums[0] == 1 +	assert u.nums[1] == 2 +	assert u.nums[2] == 3 +	assert u.typ == 1 +	assert u.pets == '{"name":"Bob","animal":"Dog"}' +} + +fn test_encode_user() { +	usr := User{ +		age: 10 +		nums: [1, 2, 3] +		last_name: 'Johnson' +		is_registered: true +		typ: 0 +		pets: 'foo' +	} +	expected := '{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"foo"}' +	out := json2.encode<User>(usr) +	
assert out == expected +} + +struct Color { +pub mut: +	space string +	point string [raw] +} + +fn (mut c Color) from_json(an json2.Any) { +	mp := an.as_map() +	$for field in Color.fields { +		match field.name { +			'space' { c.space = mp[field.name].str() } +			'point' { c.point = mp[field.name].str() } +			else {} +		} +	} +} + +fn test_raw_json_field() { +	color := json2.decode<Color>('{"space": "YCbCr", "point": {"Y": 123}}') or { +		assert false +		Color{} +	} +	assert color.point == '{"Y":123}' +	assert color.space == 'YCbCr' +} + +/* +struct City { +	name string +} + +struct Country { +	cities []City +	name   string +} + +fn test_struct_in_struct() { +	country := json.decode(Country, '{ "name": "UK", "cities": [{"name":"London"}, {"name":"Manchester"}]}') or { +		assert false +		exit(1) +	} +	assert country.name == 'UK' +	assert country.cities.len == 2 +	assert country.cities[0].name == 'London' +	assert country.cities[1].name == 'Manchester' +	println(country.cities) +} +*/ +fn test_encode_map() { +	expected := '{"one":1,"two":2,"three":3,"four":4}' +	numbers := map{ +		'one':   json2.Any(1) +		'two':   json2.Any(2) +		'three': json2.Any(3) +		'four':  json2.Any(4) +	} +	out := numbers.str() +	assert out == expected +} + +/* +fn test_parse_map() { +	expected := { +		'one': 1 +		'two': 2 +		'three': 3 +		'four': 4 +	} +	out := json.decode<map[string]int>('{"one":1,"two":2,"three":3,"four":4}') or { +		assert false +		r := { +			'': 0 +		} +		r +	} +	println(out) +	assert out == expected +} + +struct Data { +	countries []Country +	users     map[string]User +	extra     map[string]map[string]int +} + +fn test_nested_type() { +	data_expected := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":[{"name":"Donlon"},{"name":"Termanches"}],"name":"KU"}],"users":{"Foo":{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little 
foo"},"Boo":{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}},"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}' + +	data := Data{ +		countries: [ +			Country{ +				name: 'UK' +				cities: [City{'London'}, +					City{'Manchester'}, +				] +			}, +			Country{ +				name: 'KU' +				cities: [City{'Donlon'}, +					City{'Termanches'}, +				] +			}, +		] +		users: { +			'Foo': User{ +				age: 10 +				nums: [1, 2, 3] +				last_name: 'Johnson' +				is_registered: true +				typ: 0 +				pets: 'little foo' +			}, +			'Boo': User{ +				age: 20 +				nums: [5, 3, 1] +				last_name: 'Smith' +				is_registered: false +				typ: 4 +				pets: 'little boo' +			} +		}, +		extra: { +			'2': { +				'n1': 2 +				'n2': 4 +				'n3': 8 +				'n4': 16 +			}, +			'3': { +				'n1': 3 +				'n2': 9 +				'n3': 27 +				'n4': 81 +			}, +		} +	} +	out := json.encode(data) +	println(out) +	assert out == data_expected + +	data2 := json.decode(Data, data_expected) or { +		assert false +		Data{} +	} +	assert data2.countries.len == data.countries.len +	for i in 0..1 { +		assert data2.countries[i].name == data.countries[i].name +		assert data2.countries[i].cities.len == data.countries[i].cities.len +		for j in 0..1 { +			assert data2.countries[i].cities[j].name == data.countries[i].cities[j].name +		} +	} + +	for key, user in data.users { +		assert data2.users[key].age == user.age +		assert data2.users[key].nums == user.nums +		assert data2.users[key].last_name == user.last_name +		assert data2.users[key].is_registered == user.is_registered +		assert data2.users[key].typ == user.typ +		// assert data2.users[key].pets == user.pets // TODO FIX +	} + +	for k, v in data.extra { +		for k2, v2 in v { +			assert data2.extra[k][k2] == v2 +		} +	} +} + +fn test_errors() { +	invalid_array := fn () { +		data := 
'{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":{"name":"Donlon"},"name":"KU"}],"users":{"Foo":{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},"Boo":{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}},"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}' + +		json.decode(Data, data) or { +			println(err) +			assert err.starts_with('Json element is not an array:') +			return +		} +		assert false +	} +	invalid_object := fn() { +		data := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":[{"name":"Donlon"},{"name":"Termanches"}],"name":"KU"}],"users":[{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}],"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}' + +		json.decode(Data, data) or { +			println(err) +			assert err.starts_with('Json element is not an object:') +			return +		} +		assert false +	} +	invalid_array() +	invalid_object() +} +*/ diff --git a/v_windows/v/old/vlib/x/json2/scanner.v b/v_windows/v/old/vlib/x/json2/scanner.v new file mode 100644 index 0000000..473a83b --- /dev/null +++ b/v_windows/v/old/vlib/x/json2/scanner.v @@ -0,0 +1,306 @@ +// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. 
+module json2 + +import strconv + +struct Scanner { +mut: +	text []byte +	pos  int +	line int +	col  int +} + +enum TokenKind { +	none_ +	error +	str_ +	float +	int_ +	null +	bool_ +	eof +	comma = 44 +	colon = 58 +	lsbr = 91 +	rsbr = 93 +	lcbr = 123 +	rcbr = 125 +} + +struct Token { +	lit  []byte +	kind TokenKind +	line int +	col  int +} + +const ( +	// list of characters commonly used in JSON. +	char_list                 = [`{`, `}`, `[`, `]`, `,`, `:`] +	// list of newlines to check when moving to a new position. +	newlines                  = [`\r`, `\n`, `\t`] +	// list of escapable that needs to be escaped inside a JSON string. +	// double quotes and forward slashes are excluded intentionally since +	// they have their own separate checks for it in order to pass the +	// JSON test suite (https://github.com/nst/JSONTestSuite/). +	important_escapable_chars = [`\b`, `\f`, `\n`, `\r`, `\t`] +	// list of valid unicode escapes aside from \u{4-hex digits} +	valid_unicode_escapes     = [`b`, `f`, `n`, `r`, `t`, `\\`, `"`, `/`] +	// used for transforming escapes into valid unicode (eg. n => \n) +	unicode_transform_escapes = map{ +		98:  `\b` +		102: `\f` +		110: `\n` +		114: `\r` +		116: `\t` +		92:  `\\` +		34:  `"` +		47:  `/` +	} +	exp_signs = [byte(`-`), `+`] +) + +// move_pos proceeds to the next position. +fn (mut s Scanner) move() { +	s.move_pos(true, true) +} + +// move_pos_with_newlines is the same as move_pos but only enables newline checking. 
+fn (mut s Scanner) move_pos_with_newlines() { +	s.move_pos(false, true) +} + +fn (mut s Scanner) move_pos(include_space bool, include_newlines bool) { +	s.pos++ +	if s.pos < s.text.len { +		if include_newlines && s.text[s.pos] in json2.newlines { +			s.line++ +			s.col = 0 +			if s.text[s.pos] == `\r` && s.pos + 1 < s.text.len && s.text[s.pos + 1] == `\n` { +				s.pos++ +			} +			for s.pos < s.text.len && s.text[s.pos] in json2.newlines { +				s.move() +			} +		} else if include_space && s.text[s.pos] == ` ` { +			s.pos++ +			s.col++ +			for s.pos < s.text.len && s.text[s.pos] == ` ` { +				s.move() +			} +		} +	} else { +		s.col++ +	} +} + +// error returns an error token. +fn (s Scanner) error(description string) Token { +	return s.tokenize(description.bytes(), .error) +} + +// tokenize returns a token based on the given lit and kind. +fn (s Scanner) tokenize(lit []byte, kind TokenKind) Token { +	return Token{ +		lit: lit +		kind: kind +		col: s.col +		line: s.line +	} +} + +// text_scan scans and returns a string token. 
+[manualfree] +fn (mut s Scanner) text_scan() Token { +	mut has_closed := false +	mut chrs := []byte{} +	for { +		s.pos++ +		s.col++ +		if s.pos >= s.text.len { +			break +		} +		ch := s.text[s.pos] +		if (s.pos - 1 >= 0 && s.text[s.pos - 1] != `\\`) && ch == `"` { +			has_closed = true +			break +		} else if (s.pos - 1 >= 0 && s.text[s.pos - 1] != `\\`) +			&& ch in json2.important_escapable_chars { +			return s.error('character must be escaped with a backslash') +		} else if (s.pos == s.text.len - 1 && ch == `\\`) || ch == byte(0) { +			return s.error('invalid backslash escape') +		} else if s.pos + 1 < s.text.len && ch == `\\` { +			peek := s.text[s.pos + 1] +			if peek in json2.valid_unicode_escapes { +				chrs << json2.unicode_transform_escapes[int(peek)] +				s.pos++ +				s.col++ +				continue +			} else if peek == `u` { +				if s.pos + 5 < s.text.len { +					s.pos++ +					s.col++ +					mut codepoint := []byte{} +					codepoint_start := s.pos +					for s.pos < s.text.len && s.pos < codepoint_start + 4 { +						s.pos++ +						s.col++ +						if s.text[s.pos] == `"` { +							break +						} else if !s.text[s.pos].is_hex_digit() { +							x := s.text[s.pos].ascii_str() +							return s.error('`$x` is not a hex digit') +						} +						codepoint << s.text[s.pos] +					} +					if codepoint.len != 4 { +						return s.error('unicode escape must have 4 hex digits') +					} +					val := u32(strconv.parse_uint(codepoint.bytestr(), 16, 32) or { 0 }) +					converted := utf32_to_str(val) +					converted_bytes := converted.bytes() +					chrs << converted_bytes +					unsafe { +						converted.free() +						converted_bytes.free() +						codepoint.free() +					} +					continue +				} else { +					return s.error('incomplete unicode escape') +				} +			} else if peek == `U` { +				return s.error('unicode endpoints must be in lowercase `u`') +			} else if peek == byte(229) { +				return s.error('unicode endpoint not allowed') +			} else { +				return s.error('invalid backslash 
escape') +			} +		} +		chrs << ch +	} +	tok := s.tokenize(chrs, .str_) +	s.move() +	if !has_closed { +		return s.error('missing double quotes in string closing') +	} +	return tok +} + +// num_scan scans and returns an int/float token. +fn (mut s Scanner) num_scan() Token { +	// analyze json number structure +	// -[digit][?[dot][digit]][?[E/e][?-/+][digit]] +	mut is_fl := false +	mut dot_index := -1 +	mut digits := []byte{} +	if s.text[s.pos] == `-` { +		digits << `-` +		if !s.text[s.pos + 1].is_digit() { +			return s.invalid_token() +		} +		s.move_pos_with_newlines() +	} +	if s.text[s.pos] == `0` && (s.pos + 1 < s.text.len && s.text[s.pos + 1].is_digit()) { +		return s.error('leading zeroes in a number are not allowed') +	} +	for s.pos < s.text.len && (s.text[s.pos].is_digit() || (!is_fl && s.text[s.pos] == `.`)) { +		digits << s.text[s.pos] +		if s.text[s.pos] == `.` { +			is_fl = true +			dot_index = digits.len - 1 +		} +		s.move_pos_with_newlines() +	} +	if dot_index + 1 < s.text.len && digits[dot_index + 1..].len == 0 { +		return s.error('invalid float') +	} +	if s.pos < s.text.len && (s.text[s.pos] == `e` || s.text[s.pos] == `E`) { +		digits << s.text[s.pos] +		s.move_pos_with_newlines() +		if s.pos < s.text.len && s.text[s.pos] in json2.exp_signs { +			digits << s.text[s.pos] +			s.move_pos_with_newlines() +		} +		mut exp_digits_count := 0 +		for s.pos < s.text.len && s.text[s.pos].is_digit() { +			digits << s.text[s.pos] +			exp_digits_count++ +			s.move_pos_with_newlines() +		} +		if exp_digits_count == 0 { +			return s.error('invalid exponent') +		} +	} +	kind := if is_fl { TokenKind.float } else { TokenKind.int_ } +	return s.tokenize(digits, kind) +} + +// invalid_token returns an error token with the invalid token message. 
+fn (s Scanner) invalid_token() Token { +	if s.text[s.pos] >= 32 && s.text[s.pos] <= 126 { +		x := s.text[s.pos].ascii_str() +		return s.error('invalid token `$x`') +	} else { +		x := s.text[s.pos].str_escaped() +		return s.error('invalid token `$x`') +	} +} + +// scan returns a token based on the scanner's current position. +[manualfree] +fn (mut s Scanner) scan() Token { +	if s.pos < s.text.len && (s.text[s.pos] == ` ` || s.text[s.pos] in json2.newlines) { +		s.move() +	} +	if s.pos >= s.text.len { +		return s.tokenize([]byte{}, .eof) +	} else if s.pos + 3 < s.text.len && (s.text[s.pos] == `t` || s.text[s.pos] == `n`) { +		ident := s.text[s.pos..s.pos + 4].bytestr() +		if ident == 'true' || ident == 'null' { +			mut kind := TokenKind.null +			if ident == 'true' { +				kind = .bool_ +			} +			unsafe { ident.free() } +			val := s.text[s.pos..s.pos + 4] +			tok := s.tokenize(val, kind) +			s.move() // n / t +			s.move() // u / r +			s.move() // l / u +			s.move() // l / e +			return tok +		} +		unsafe { ident.free() } +		return s.invalid_token() +	} else if s.pos + 4 < s.text.len && s.text[s.pos] == `f` { +		ident := s.text[s.pos..s.pos + 5].bytestr() +		if ident == 'false' { +			unsafe { ident.free() } +			val := s.text[s.pos..s.pos + 5] +			tok := s.tokenize(val, .bool_) +			s.move() // f +			s.move() // a +			s.move() // l +			s.move() // s +			s.move() // e +			return tok +		} +		unsafe { ident.free() } +		return s.invalid_token() +	} else if s.text[s.pos] in json2.char_list { +		chr := s.text[s.pos] +		tok := s.tokenize([]byte{}, TokenKind(int(chr))) +		s.move() +		return tok +	} else if s.text[s.pos] == `"` { +		return s.text_scan() +	} else if s.text[s.pos].is_digit() || s.text[s.pos] == `-` { +		return s.num_scan() +	} else { +		return s.invalid_token() +	} +} diff --git a/v_windows/v/old/vlib/x/json2/scanner_test.v b/v_windows/v/old/vlib/x/json2/scanner_test.v new file mode 100644 index 0000000..73f4d79 --- /dev/null +++ 
b/v_windows/v/old/vlib/x/json2/scanner_test.v @@ -0,0 +1,351 @@ +module json2 + +fn test_str() { +	mut sc := Scanner{ +		text: '"test"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .str_ +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == 'test' +} + +fn test_str_valid_unicode_escape() { +	mut sc := Scanner{ +		text: r'"\u0048"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .str_ +	assert tok.lit.len == 1 +	assert tok.lit.bytestr() == 'H' +} + +fn test_str_valid_unicode_escape_2() { +	mut sc := Scanner{ +		text: r'"\u2714"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .str_ +	assert tok.lit.len == 3 +	assert tok.lit.bytestr() == '✔' +} + +fn test_str_invalid_escape() { +	mut sc := Scanner{ +		text: r'"\z"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid backslash escape' +} + +fn test_str_invalid_must_be_escape() { +	for char in important_escapable_chars { +		mut sc := Scanner{ +			text: [byte(`"`), `t`, char, `"`] +		} +		tok := sc.scan() +		assert tok.kind == .error +		assert tok.lit.bytestr() == 'character must be escaped with a backslash' +	} +} + +fn test_str_invalid_unicode_escape() { +	mut sc := Scanner{ +		text: r'"\u010G"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == '`G` is not a hex digit' +} + +fn test_str_invalid_unicode_escape_len() { +	mut sc := Scanner{ +		text: r'"\u001"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'unicode escape must have 4 hex digits' +} + +fn test_str_invalid_uppercase_u() { +	mut sc := Scanner{ +		text: r'"\U0000"'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'unicode endpoints must be in lowercase `u`' +} + +fn test_str_missing_closing_bracket() { +	mut sc := Scanner{ +		text: '"incomplete'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'missing double quotes in string closing' +} 
+ +fn test_int() { +	mut sc := Scanner{ +		text: '10'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 2 +	assert tok.lit.bytestr() == '10' +} + +fn test_int_negative() { +	mut sc := Scanner{ +		text: '-10'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 3 +	assert tok.lit.bytestr() == '-10' +} + +fn test_float() { +	mut sc := Scanner{ +		text: '123.400'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .float +	assert tok.lit.len == 7 +	assert tok.lit.bytestr() == '123.400' +} + +fn test_float_negative() { +	mut sc := Scanner{ +		text: '-123.400'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .float +	assert tok.lit.len == 8 +	assert tok.lit.bytestr() == '-123.400' +} + +fn test_int_exp() { +	mut sc := Scanner{ +		text: '1E22'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == '1E22' +} + +fn test_int_exp_negative() { +	mut sc := Scanner{ +		text: '1E-2'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == '1E-2' +} + +fn test_int_exp_positive() { +	mut sc := Scanner{ +		text: '1E+2'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == '1E+2' +} + +fn test_float_exp() { +	mut sc := Scanner{ +		text: '123.456e78'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .float +	assert tok.lit.len == 10 +	assert tok.lit.bytestr() == '123.456e78' +} + +fn test_float_exp_negative() { +	mut sc := Scanner{ +		text: '20.56e-5'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .float +	assert tok.lit.len == 8 +	assert tok.lit.bytestr() == '20.56e-5' +} + +fn test_float_exp_positive() { +	mut sc := Scanner{ +		text: '20.56e+5'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .float +	assert tok.lit.len == 8 +	assert tok.lit.bytestr() == '20.56e+5' +} + +fn test_number_with_space() { +	mut sc := Scanner{ +		text: ' 
4'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .int_ +	assert tok.lit.len == 1 +	assert tok.lit.bytestr() == '4' +} + +fn test_number_invalid_leading_zero() { +	mut sc := Scanner{ +		text: '0010'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'leading zeroes in a number are not allowed' +} + +fn test_number_invalid_leading_zero_negative() { +	mut sc := Scanner{ +		text: '-0010'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'leading zeroes in a number are not allowed' +} + +fn test_number_invalid_start_char() { +	mut sc := Scanner{ +		text: '+1'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid token `+`' +} + +fn test_number_invalid_char() { +	mut sc := Scanner{ +		text: '122x'.bytes() +	} +	sc.scan() +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid token `x`' +} + +fn test_number_invalid_char_float() { +	mut sc := Scanner{ +		text: '122x.1'.bytes() +	} +	sc.scan() +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid token `x`' +} + +fn test_number_invalid_multiple_dot() { +	mut sc := Scanner{ +		text: '122.108.10'.bytes() +	} +	sc.scan() +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid token `.`' +} + +fn test_number_invalid_exp() { +	mut sc := Scanner{ +		text: '0.3e'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid exponent' +} + +fn test_number_invalid_exp_with_sign() { +	mut sc := Scanner{ +		text: '0.3e+'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid exponent' +} + +fn test_number_invalid_zero_exp() { +	mut sc := Scanner{ +		text: '0e'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid exponent' +} + +fn test_number_invalid_dot_exp() { +	mut sc := Scanner{ +		text: 
'0.e'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid float' +} + +fn test_number_invalid_double_exp() { +	mut sc := Scanner{ +		text: '2eE'.bytes() +	} +	sc.scan() +	tok := sc.scan() +	assert tok.kind == .error +	assert tok.lit.bytestr() == 'invalid token `E`' +} + +fn test_null() { +	mut sc := Scanner{ +		text: 'null'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .null +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == 'null' +} + +fn test_bool_true() { +	mut sc := Scanner{ +		text: 'true'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .bool_ +	assert tok.lit.len == 4 +	assert tok.lit.bytestr() == 'true' +} + +fn test_bool_false() { +	mut sc := Scanner{ +		text: 'false'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .bool_ +	assert tok.lit.len == 5 +	assert tok.lit.bytestr() == 'false' +} + +fn test_json_with_whitespace_start() { +	mut sc := Scanner{ +		text: ' \n  \n\t {'.bytes() +	} +	tok := sc.scan() +	eprintln(tok) +	assert tok.kind == .lcbr +	assert tok.lit.len == 0 +} + +fn test_json_with_whitespace_end() { +	mut sc := Scanner{ +		text: '}  \n\t'.bytes() +	} +	tok := sc.scan() +	assert tok.kind == .rcbr +	tok2 := sc.scan() +	eprintln(tok2) +	assert tok2.kind == .eof +} diff --git a/v_windows/v/old/vlib/x/ttf/README.md b/v_windows/v/old/vlib/x/ttf/README.md new file mode 100644 index 0000000..6394ac1 --- /dev/null +++ b/v_windows/v/old/vlib/x/ttf/README.md @@ -0,0 +1,310 @@ +# TTF font utility +## introduction +This module is designed to perform two main task +- Load the font file +- Render text using a TTF font + +The render system can be single or multiple, for example it is possible to have a bitmap +render and a HW accelerated render. + +## TTF loader +This part of the module do a simple task, load a TTF file and preprocess all the loaded data +in order to simplify the rendering phase. 
+ +Let's start with a simple snippet of code that load a font from the disk: +```v ignore +mut ttf_font := ttf.TTF_File{} +ttf_font.buf = os.read_bytes("arial.ttf") or { panic(err) } +ttf_font.init() +``` +*Note: the font must be passed to the `TTF_file` as RAM buffer.* +At this point the font "arial" is loaded and parsed and if it is a valid TTF font it is +ready for the rendering. +We can get some quick info on the font as string using the `get_info_string` function: + +```v oksyntax +println(ttf_font.get_info_string()) +``` +produces an output like this: +``` +----- Font Info ----- +font_family     : Arial +font_sub_family : Normal +full_name       : Arial +postscript_name : ArialMT +version         : 1 +font_revision   : 5.06 +magic_number    : 5f0f3cf5 +flags           : 81b +created  unixTS : 649950890 +modified unixTS : 1282151447 +units_per_em    : 2048 +box             : [x_min:-1361, y_min:-665, x_Max:4096, y_Max:2060] +mac_style       : 0 +----------------------- +``` + +Once loaded a font the `TTF_File` struct is filled with the font data and texts can be rendered. +At high level no more action are required to use the loaded font. +Multiple fonts can be loaded without problems at the same time. + +## TTF Bitmap render +In this modue it is possible to have different renders running at the same time. +At the present time all the rendering are made on the CPU, sokol is used only to draw the +rendered text to the screen. +Let's start with a simple snippet of code: +```v oksyntax +import os +import x.ttf + +[console] +fn main() { +	mut ttf_font := ttf.TTF_File{} +	ttf_font.buf = os.read_bytes('arial.ttf') or { panic(err) } +	ttf_font.init() +	// print font info +	println(ttf_font.get_info_string()) +} +``` +This simple code load a TTF font and display its basic informations. + +### draw_text +The draw text function draw simple strings without indentation or other imagination tasks. 
+At this point we can render a simple text: +```v oksyntax +import os +import x.ttf + +[console] +fn main() { +	mut ttf_font := ttf.TTF_File{} +	ttf_font.buf = os.read_bytes('arial.ttf') or { panic(err) } +	ttf_font.init() +	// print font info +	println(ttf_font.get_info_string()) + +	bmp_width := 200 +	bmp_heigth := 64 +	bmp_layers := 4 // number of planes for an RGBA buffer +	// memory size of the buffer +	bmp_size := bmp_width * bmp_heigth * bmp_layers + +	font_size := 32 // font size in points +	device_dpi := 72 // default screen DPI +	// Formula for scale calculation +	// scaler := (font_size * device dpi) / (72dpi * em_unit) +	scale := f32(font_size * device_dpi) / f32(72 * ttf_font.units_per_em) +	// height of the font to use in the buffer to separate the lines +	y_base := int((ttf_font.y_max - ttf_font.y_min) * scale) + +	// declare the bitmap struct +	mut bmp := ttf.BitMap{ +		tf: &ttf_font +		buf: malloc(bmp_size) +		buf_size: bmp_size +		width: bmp_width +		height: bmp_heigth +		bp: bmp_layers +		color: 0x000000_FF // RGBA black +		scale: scale +	} +	bmp.init_filler() +	bmp.clear() +	bmp.set_pos(10, y_base) +	bmp.draw_text('Test Text!') +	bmp.save_as_ppm('test.ppm') +} +``` +This is the low level render that draw ther text on a bitmap and save the bitmap on a disk as +`.ppm` file. +*Note: The render in this case is a raw rendering without any postfiltering or other processing.* + +Using the low level rendering you need to manage all the amenities like allocate and release +memory and other tasks like calc the character dimensions. + +You can specify the style for the text rendering in the `BitMap` struct:: +```v +enum Style { +	outline +	outline_aliased +	filled // default syle +	raw +} +``` +Use this level only if you want achieve particular result on text rendering. + +### draw_text_block +Draw text block draw a justified and indented block of multiline text in the bitmap. 
+```v oksyntax +import os +import x.ttf + +[console] +fn main() { +	mut ttf_font := ttf.TTF_File{} +	ttf_font.buf = os.read_bytes('arial.ttf') or { panic(err) } +	ttf_font.init() +	// print font info +	println(ttf_font.get_info_string()) + +	bmp_width := 200 +	bmp_heigth := 200 +	bmp_layers := 4 // number of planes for an RGBA buffer +	// memory size of the buffer +	bmp_size := bmp_width * bmp_heigth * bmp_layers + +	font_size := 32 // font size in points +	device_dpi := 72 // default screen DPI +	// Formula for scale calculation +	// scaler := (font_size * device dpi) / (72dpi * em_unit) +	scale := f32(font_size * device_dpi) / f32(72 * ttf_font.units_per_em) +	// height of the font to use in the buffer to separate the lines +	y_base := int((ttf_font.y_max - ttf_font.y_min) * scale) + +	text := "Today it is a good day! +Tomorrow I'm not so sure :( +But Vwill prevail for sure, V is the way!! +òàèì@ò!£$%& +" +	// declare the bitmap struct +	mut bmp := ttf.BitMap{ +		tf: &ttf_font +		buf: malloc(bmp_size) +		buf_size: bmp_size +		width: bmp_width +		height: bmp_heigth +		bp: bmp_layers +		color: 0x000000_FF // RGBA black +		scale: scale +	} +	bmp.init_filler() +	bmp.clear() +	bmp.justify = true +	bmp.align = .left +	bmp.draw_text_block(text, x: 0, y: 0, w: bmp_width - 20, h: bmp_heigth) +	bmp.save_as_ppm('test.ppm') +} +``` +This is the low level render that draw text block on the bitmap. 
+A text block is defined from a `Text_block` struct: +```v +struct Text_block { +	x         int  // x postion of the left high corner +	y         int  // y postion of the left high corner +	w         int  // width of the text block +	h         int  // heigth of the text block +	cut_lines bool = true // force to cut the line if the length is over the text block width +} +``` +and use the following bitmap fields: +```v ignore +	style              Style      = .filled // default syle +	align              Text_align = .left   // default text align +	justify            bool				    // justify text flag, default deactivated +	justify_fill_ratio f32        = 0.5     // justify fill ratio, if the ratio of the filled +	                                        // row is >= of this then justify the text +``` + +It is possible to modify these parameters to obtain the desired effect on the text rendering. + +## TTF Sokol render +The sokol render use the  bitmap render to create the text and the `gg` functions to render +the text to the screen. +It is mor esimpel to use in a `gg app` that the raw bitmap render. +Each single text rendered need its own reder to be declared, after you can modify it. 
+Here a simple example of the usage: +```v oksyntax +import gg +import gx +import sokol.sapp +import sokol.sgl +import x.ttf +import os + +const ( +	win_width  = 600 +	win_height = 700 +	bg_color   = gx.white +	font_paths = [ +		'arial.ttf', +	] +) + +struct App_data { +pub mut: +	gg        &gg.Context +	sg_img    C.sg_image +	init_flag bool +	frame_c   int + +	tf         []ttf.TTF_File +	ttf_render []ttf.TTF_render_Sokol +} + +fn my_init(mut app App_data) { +	app.init_flag = true +} + +fn draw_frame(mut app App_data) { +	cframe_txt := 'Current Frame: $app.frame_c' + +	app.gg.begin() + +	sgl.defaults() +	sgl.matrix_mode_projection() +	sgl.ortho(0.0, f32(sapp.width()), f32(sapp.height()), 0.0, -1.0, 1.0) + +	// draw text only if the app is already initialized +	if app.init_flag == true { +		// update the text +		mut txt1 := &app.ttf_render[0] +		txt1.destroy_texture() +		txt1.create_text(cframe_txt, 43) +		txt1.create_texture() +		txt1.draw_text_bmp(app.gg, 30, 60) +	} +	app.frame_c++ +	app.gg.end() +} + +[console] +fn main() { +	mut app := &App_data{ +		gg: 0 +	} + +	app.gg = gg.new_context( +		width: win_width +		height: win_height +		create_window: true +		window_title: 'Test TTF module' +		user_data: app +		bg_color: bg_color +		frame_fn: draw_frame +		init_fn: my_init +	) + +	// load TTF fonts +	for font_path in font_paths { +		mut tf := ttf.TTF_File{} +		tf.buf = os.read_bytes(font_path) or { panic(err) } +		println('TrueTypeFont file [$font_path] len: $tf.buf.len') +		tf.init() +		println(tf.get_info_string()) +		app.tf << tf +	} + +	// TTF render 0 Frame counter +	app.ttf_render << &ttf.TTF_render_Sokol{ +		bmp: &ttf.BitMap{ +			tf: &(app.tf[0]) +			buf: unsafe { malloc(32000000) } +			buf_size: (32000000) +			color: 0xFF0000FF +			// style: .raw +		} +	} + +	app.gg.run() +} +``` diff --git a/v_windows/v/old/vlib/x/ttf/common.v b/v_windows/v/old/vlib/x/ttf/common.v new file mode 100644 index 0000000..ad88d33 --- /dev/null +++ 
b/v_windows/v/old/vlib/x/ttf/common.v @@ -0,0 +1,205 @@ +module ttf + +/********************************************************************** +* +* Common data for the module +* +* Copyright (c) 2021 Dario Deledda. All rights reserved. +* Use of this source code is governed by an MIT license +* that can be found in the LICENSE file. +* +* Note: +* +* TODO: +**********************************************************************/ +import os +import math + +// text align +pub enum Text_align { +	left +	center +	right +	justify +} + +// draw style +pub enum Style { +	outline +	outline_aliased +	filled +	raw +} + +/****************************************************************************** +* +* DEBUG Utility +* +******************************************************************************/ +const debug_flag = false + +fn dprintln(txt string) { +	if ttf.debug_flag { +		println(txt) +	} +} + +/****************************************************************************** +* +* Utility +* +******************************************************************************/ +// transform the bitmap from one layer to color layers +fn (mut bmp BitMap) format_texture() { +	r := byte(bmp.color >> 24) +	g := byte((bmp.color >> 16) & 0xFF) +	b := byte((bmp.color >> 8) & 0xFF) +	a := byte(bmp.color & 0xFF) + +	b_r := byte(bmp.bg_color >> 24) +	b_g := byte((bmp.bg_color >> 16) & 0xFF) +	b_b := byte((bmp.bg_color >> 8) & 0xFF) +	b_a := byte(bmp.bg_color & 0xFF) + +	// trasform buffer in a texture +	x := bmp.buf +	unsafe { +		mut i := 0 +		for i < bmp.buf_size { +			data := x[i] +			if data > 0 { +				x[i + 0] = r +				x[i + 1] = g +				x[i + 2] = b +				// alpha +				x[i + 3] = byte((a * data) >> 8) +			} else { +				x[i + 0] = b_r +				x[i + 1] = b_g +				x[i + 2] = b_b +				x[i + 3] = b_a +			} +			i += 4 +		} +	} +} + +// write out a .ppm file +pub fn (mut bmp BitMap) save_as_ppm(file_name string) { +	tmp_buf := bmp.buf +	mut buf := unsafe { malloc_noscan(bmp.buf_size) } +	
unsafe { C.memcpy(buf, tmp_buf, bmp.buf_size) } +	bmp.buf = buf + +	bmp.format_texture() +	npixels := bmp.width * bmp.height +	mut f_out := os.create(file_name) or { panic(err) } +	f_out.writeln('P3') or { panic(err) } +	f_out.writeln('$bmp.width $bmp.height') or { panic(err) } +	f_out.writeln('255') or { panic(err) } +	for i in 0 .. npixels { +		pos := i * bmp.bp +		unsafe { +			c_r := bmp.buf[pos] +			c_g := bmp.buf[pos + 1] +			c_b := bmp.buf[pos + 2] +			f_out.write_string('$c_r $c_g $c_b ') or { panic(err) } +		} +	} +	f_out.close() + +	unsafe { +		free(buf) +	} +	bmp.buf = tmp_buf +} + +pub fn (mut bmp BitMap) get_raw_bytes() []byte { +	mut f_buf := []byte{len: bmp.buf_size / 4} +	mut i := 0 +	for i < bmp.buf_size { +		unsafe { +			f_buf[i >> 2] = *(bmp.buf + i) +		} +		i += 4 +	} +	return f_buf +} + +pub fn (mut bmp BitMap) save_raw_data(file_name string) { +	os.write_file_array(file_name, bmp.get_raw_bytes()) or { panic(err) } +} + +// +// Math functions +// +// integer part of x +[inline] +fn ipart(x f32) f32 { +	return f32(math.floor(x)) +} + +[inline] +fn round(x f32) f32 { +	return ipart(x + 0.5) +} + +// fractional part of x +[inline] +fn fpart(x f32) f32 { +	return x - f32(math.floor(x)) +} + +[inline] +fn rfpart(x f32) f32 { +	return 1 - fpart(x) +} + +/****************************************************************************** +* +* Colors +* +******************************************************************************/ +/* +[inline] +pub fn (mut dev BitMap) get_color(x int, y int) (int, int, int, int){ +	if x < 0 || x >= dev.width || y < 0 || y >= dev.height { +		return 0,0,0,0 +	} +	mut i := (x + y * dev.width)*dev.bp +	unsafe{ +		return dev.buf[i], dev.buf[i+1], dev.buf[i+2], dev.buf[i+3] +	} +} + +[inline] +pub fn (mut dev BitMap) get_color_u32(x int, y int) u32{ +	r, g, b, a := dev.get_color(x, y) +	unsafe{ +		return u32(r<<24) | u32(g<<16) | u32(b<<8) | u32(a) +	} +} +*/ 
+/****************************************************************************** +* +* Drawing +* +******************************************************************************/ +[inline] +pub fn color_multiply_alpha(c u32, level f32) u32 { +	return u32(f32(c & 0xFF) * level) +} + +[inline] +pub fn color_multiply(c u32, level f32) u32 { +	mut r := (f32((c >> 24) & 0xFF) / 255.0) * level +	mut g := (f32((c >> 16) & 0xFF) / 255.0) * level +	mut b := (f32((c >> 8) & 0xFF) / 255.0) * level +	mut a := (f32(c & 0xFF) / 255.0) * level +	r = if r > 1.0 { 1.0 } else { r } +	g = if g > 1.0 { 1.0 } else { g } +	b = if b > 1.0 { 1.0 } else { b } +	a = if a > 1.0 { 1.0 } else { a } + +	return (u32(r * 255) << 24) | (u32(g * 255) << 16) | (u32(b * 255) << 8) | u32(a * 255) +} diff --git a/v_windows/v/old/vlib/x/ttf/render_bmp.v b/v_windows/v/old/vlib/x/ttf/render_bmp.v new file mode 100644 index 0000000..c0cf6dc --- /dev/null +++ b/v_windows/v/old/vlib/x/ttf/render_bmp.v @@ -0,0 +1,825 @@ +module ttf + +/********************************************************************** +* +* BMP render module utility functions +* +* Copyright (c) 2021 Dario Deledda. All rights reserved. +* Use of this source code is governed by an MIT license +* that can be found in the LICENSE file. +* +* Note: +* +* TODO: +* - manage text directions R to L +**********************************************************************/ +import encoding.utf8 +import math +import math.mathutil as mu + +pub struct BitMap { +pub mut: +	tf       &TTF_File +	buf      &byte = 0 // pointer to the memory buffer +	buf_size int   // allocated buf size in bytes +	width    int = 1 // width of the buffer +	height   int = 1 // height of the buffer +	bp       int = 4 // byte per pixel of the buffer +	bg_color u32 = 0xFFFFFF_00 // background RGBA format +	color    u32 = 0x000000_FF // RGBA format +	scale    f32 = 1.0 // internal usage!! 
+	scale_x  f32 = 1.0 // X scale of the single glyph +	scale_y  f32 = 1.0 // Y scale of the single glyph +	angle    f32 = 0.0 // angle of rotation of the bitmap +	// spaces +	space_cw   f32 = 1.0 // width of the space glyph internal usage!! +	space_mult f32 = f32(0.0) // 1.0/16.0  // space between letter, is a multiplier for a standrd space ax +	// used only by internal text rendering!! +	tr_matrix          []f32      = [f32(1), 0, 0, 0, 1, 0, 0, 0, 0] // transformation matrix +	ch_matrix          []f32      = [f32(1), 0, 0, 0, 1, 0, 0, 0, 0] // character matrix +	style              Style      = .filled // default syle +	align              Text_align = .left // default text align +	justify            bool // justify text flag, default deactivated +	justify_fill_ratio f32 = 0.5 // justify fill ratio, if the ratio of the filled row is >= of this then justify the text +	filler             [][]int    // filler buffer for the renderer +	// flag to force font embedded metrics +	use_font_metrics bool +} + +/****************************************************************************** +* +* Utility +* +******************************************************************************/ +// clear clear the bitmap with 0 bytes +pub fn (mut bmp BitMap) clear() { +	mut sz := bmp.width * bmp.height * bmp.bp +	unsafe { +		C.memset(bmp.buf, 0x00, sz) +	} +} + +// transform matrix applied to the text +fn (bmp &BitMap) trf_txt(p &Point) (int, int) { +	return int(p.x * bmp.tr_matrix[0] + p.y * bmp.tr_matrix[3] + bmp.tr_matrix[6]), int( +		p.x * bmp.tr_matrix[1] + p.y * bmp.tr_matrix[4] + bmp.tr_matrix[7]) +} + +// transform matrix applied to the char +fn (bmp &BitMap) trf_ch(p &Point) (int, int) { +	return int(p.x * bmp.ch_matrix[0] + p.y * bmp.ch_matrix[3] + bmp.ch_matrix[6]), int( +		p.x * bmp.ch_matrix[1] + p.y * bmp.ch_matrix[4] + bmp.ch_matrix[7]) +} + +// set draw postion in the buffer +pub fn (mut bmp BitMap) set_pos(x f32, y f32) { +	bmp.tr_matrix[6] = x +	bmp.tr_matrix[7] = y +} 
+ +// set the rotation angle in radiants +pub fn (mut bmp BitMap) set_rotation(a f32) { +	bmp.tr_matrix[0] = f32(math.cos(a)) // 1 +	bmp.tr_matrix[1] = f32(-math.sin(a)) // 0 +	bmp.tr_matrix[3] = f32(math.sin(a)) // 0 +	bmp.tr_matrix[4] = f32(math.cos(a)) // 1 +} + +/****************************************************************************** +* +* Filler functions +* +******************************************************************************/ +pub fn (mut bmp BitMap) init_filler() { +	h := bmp.height - bmp.filler.len +	if h < 1 { +		return +	} +	for _ in 0 .. h { +		bmp.filler << []int{len: 4} +	} +	// dprintln("Init filler: ${bmp.filler.len} rows") +} + +pub fn (mut bmp BitMap) clear_filler() { +	for i in 0 .. bmp.height { +		bmp.filler[i].clear() +	} +} + +pub fn (mut bmp BitMap) exec_filler() { +	for y in 0 .. bmp.height { +		if bmp.filler[y].len > 0 { +			bmp.filler[y].sort() +			if bmp.filler[y].len & 1 != 0 { +				// dprintln("even line!! $y => ${bmp.filler[y]}") +				continue +			} +			mut index := 0 +			for index < bmp.filler[y].len { +				startx := bmp.filler[y][index] + 1 +				endx := bmp.filler[y][index + 1] +				if startx >= endx { +					index += 2 +					continue +				} +				for x in startx .. 
endx { +					bmp.plot(x, y, bmp.color) +				} +				index += 2 +			} +		} +	} +} + +pub fn (mut bmp BitMap) fline(in_x0 int, in_y0 int, in_x1 int, in_y1 int, c u32) { +	mut x0 := f32(in_x0) +	mut x1 := f32(in_x1) +	mut y0 := f32(in_y0) +	mut y1 := f32(in_y1) +	mut tmp := f32(0) + +	// check bounds +	if (in_x0 < 0 && in_x1 < 0) || (in_x0 > bmp.width && in_x1 > bmp.width) { +		return +	} + +	if y1 < y0 { +		tmp = x0 +		x0 = x1 +		x1 = tmp + +		tmp = y0 +		y0 = y1 +		y1 = tmp +	} + +	mut dx := x1 - x0 +	mut dy := y1 - y0 + +	if dy == 0 { +		if in_y0 >= 0 && in_y0 < bmp.filler.len { +			if in_x0 <= in_x1 { +				bmp.filler[in_y0] << in_x0 +				bmp.filler[in_y0] << in_x1 +			} else { +				bmp.filler[in_y0] << in_x1 +				bmp.filler[in_y0] << in_x0 +			} +		} +		return +	} +	mut n := dx / dy +	for y in 0 .. int(dy + 0.5) { +		yd := int(y + y0) +		x := n * y + x0 +		if x > bmp.width || yd >= bmp.filler.len { +			break +		} +		if yd >= 0 && yd < bmp.filler.len { +			bmp.filler[yd] << int(x + 0.5) +			// bmp.plot(int(x+0.5), yd, bmp.color) +		} +	} +} + +/****************************************************************************** +* +* Draw functions +* +******************************************************************************/ +[inline] +pub fn (mut bmp BitMap) plot(x int, y int, c u32) bool { +	if x < 0 || x >= bmp.width || y < 0 || y >= bmp.height { +		return false +	} +	mut index := (x + y * bmp.width) * bmp.bp +	unsafe { +		// bmp.buf[index]=0xFF +		bmp.buf[index] = byte(c & 0xFF) // write only the alpha +	} +	/* +	for count in 0..(bmp.bp) { +		unsafe{ +			bmp.buf[index + count] = byte((c >> (bmp.bp - count - 1) * 8) & 0x0000_00FF) +		} +	} +	*/ +	return true +} + +/****************************************************************************** +* +* smooth draw functions +* +******************************************************************************/ +// aline draw an aliased line on the bitmap +pub fn (mut bmp BitMap) aline(in_x0 int, in_y0 int, in_x1 int, 
in_y1 int, c u32) { +	// mut c1 := c +	mut x0 := f32(in_x0) +	mut x1 := f32(in_x1) +	mut y0 := f32(in_y0) +	mut y1 := f32(in_y1) +	mut tmp := f32(0) + +	mut dx := x1 - x0 +	mut dy := y1 - y0 + +	dist := f32(0.4) + +	if mu.abs(dx) > mu.abs(dy) { +		if x1 < x0 { +			tmp = x0 +			x0 = x1 +			x1 = tmp + +			tmp = y0 +			y0 = y1 +			y1 = tmp +		} +		dx = x1 - x0 +		dy = y1 - y0 + +		x0 += 0.5 +		y0 += 0.5 + +		m := dy / dx +		mut x := x0 +		for x <= x1 + 0.5 { +			y := m * (x - x0) + y0 +			e := 1 - mu.abs(y - 0.5 - int(y)) +			bmp.plot(int(x), int(y), color_multiply_alpha(c, e * 0.75)) + +			ys1 := y + dist +			if int(ys1) != int(y) { +				v1 := mu.abs(ys1 - y) / dist * (1 - e) +				bmp.plot(int(x), int(ys1), color_multiply_alpha(c, v1)) +			} + +			ys2 := y - dist +			if int(ys2) != int(y) { +				v2 := mu.abs(y - ys2) / dist * (1 - e) +				bmp.plot(int(x), int(ys2), color_multiply_alpha(c, v2)) +			} + +			x += 1.0 +		} +	} else { +		if y1 < y0 { +			tmp = x0 +			x0 = x1 +			x1 = tmp + +			tmp = y0 +			y0 = y1 +			y1 = tmp +		} +		dx = x1 - x0 +		dy = y1 - y0 + +		x0 += 0.5 +		y0 += 0.5 + +		n := dx / dy +		mut y := y0 +		for y <= y1 + 0.5 { +			x := n * (y - y0) + x0 +			e := f32(1 - mu.abs(x - 0.5 - int(x))) +			bmp.plot(int(x), int(y), color_multiply_alpha(c, f32(e * 0.75))) + +			xs1 := x + dist +			if int(xs1) != int(x) { +				v1 := mu.abs(xs1 - x) / dist * (1 - e) +				bmp.plot(int(xs1), int(y), color_multiply_alpha(c, f32(v1))) +			} + +			xs2 := x - dist +			if int(xs2) != int(x) { +				v2 := mu.abs(x - xs1) / dist * (1 - e) +				bmp.plot(int(xs2), int(y), color_multiply_alpha(c, f32(v2))) +			} +			y += 1.0 +		} +	} +} + +/****************************************************************************** +* +* draw functions +* +******************************************************************************/ +pub fn (mut bmp BitMap) line(in_x0 int, in_y0 int, in_x1 int, in_y1 int, c u32) { +	// outline with aliased borders +	if bmp.style == .outline_aliased { +		
bmp.aline(in_x0, in_y0, in_x1, in_y1, c) +		return +	} +	// filled with aliased borders +	else if bmp.style == .filled { +		bmp.aline(in_x0, in_y0, in_x1, in_y1, c) +		bmp.fline(in_x0, in_y0, in_x1, in_y1, c) +		return +	} +	// only the filler is drawn +	else if bmp.style == .raw { +		bmp.fline(in_x0, in_y0, in_x1, in_y1, c) +		return +	} +	// if we are here we are drawing an outlined border + +	x0 := int(in_x0) +	x1 := int(in_x1) +	y0 := int(in_y0) +	y1 := int(in_y1) +	// dprintln("line[$x0,$y0,$x1,$y1]") + +	mut x := x0 +	mut y := y0 + +	dx := mu.abs(x1 - x0) +	sx := if x0 < x1 { 1 } else { -1 } +	dy := -mu.abs(y1 - y0) +	sy := if y0 < y1 { 1 } else { -1 } + +	// verical line +	if dx == 0 { +		if y0 < y1 { +			for yt in y0 .. y1 + 1 { +				bmp.plot(x0, yt, c) +			} +			return +		} +		for yt in y1 .. y0 + 1 { +			bmp.plot(x0, yt, c) +		} +		// horizontal line +		return +	} else if dy == 0 { +		if x0 < x1 { +			for xt in x0 .. x1 + 1 { +				bmp.plot(xt, y0, c) +			} +			return +		} +		for xt in x1 .. 
x0 + 1 { +			bmp.plot(xt, y0, c) +		} +		return +	} + +	mut err := dx + dy // error value e_xy +	for { +		// bmp.plot(x, y, u32(0xFF00)) +		bmp.plot(x, y, c) + +		// dprintln("$x $y [$x0,$y0,$x1,$y1]") +		if x == x1 && y == y1 { +			break +		} +		e2 := 2 * err +		if e2 >= dy { // e_xy+e_x > 0 +			err += dy +			x += sx +		} +		if e2 <= dx { // e_xy+e_y < 0 +			err += dx +			y += sy +		} +	} +} + +pub fn (mut bmp BitMap) box(in_x0 int, in_y0 int, in_x1 int, in_y1 int, c u32) { +	bmp.line(in_x0, in_y0, in_x1, in_y0, c) +	bmp.line(in_x1, in_y0, in_x1, in_y1, c) +	bmp.line(in_x0, in_y1, in_x1, in_y1, c) +	bmp.line(in_x0, in_y0, in_x0, in_y1, c) +} + +pub fn (mut bmp BitMap) quadratic(in_x0 int, in_y0 int, in_x1 int, in_y1 int, in_cx int, in_cy int, c u32) { +	/* +	x0 := int(in_x0 * bmp.scale) +	x1 := int(in_x1 * bmp.scale) +	y0 := int(in_y0 * bmp.scale) +	y1 := int(in_y1 * bmp.scale) +	cx := int(in_cx * bmp.scale) +	cy := int(in_cy * bmp.scale) +	*/ +	x0 := int(in_x0) +	x1 := int(in_x1) +	y0 := int(in_y0) +	y1 := int(in_y1) +	cx := int(in_cx) +	cy := int(in_cy) + +	mut division := f64(1.0) +	dx := mu.abs(x0 - x1) +	dy := mu.abs(y0 - y1) + +	// if few pixel draw a simple line +	// if dx == 0 && dy == 0 { +	if dx <= 2 || dy <= 2 { +		// bmp.plot(x0, y0, c) +		bmp.line(x0, y0, x1, y1, c) +		return +	} + +	division = 1.0 / (f64(if dx > dy { dx } else { dy })) + +	// division = 0.1   // 10 division +	// division = 0.25  // 4 division + +	// dprintln("div: $division") + +	/* +	----- Bezier quadratic form ----- +	t = 0.5; // given example value, half length of the curve +	x = (1 - t) * (1 - t) * p[0].x + 2 * (1 - t) * t * p[1].x + t * t * p[2].x; +	y = (1 - t) * (1 - t) * p[0].y + 2 * (1 - t) * t * p[1].y + t * t * p[2].y; +	--------------------------------- +	*/ + +	mut x_old := x0 +	mut y_old := y0 +	mut t := 0.0 + +	for t <= (1.0 + division / 2.0) { +		s := 1.0 - t +		x := s * s * x0 + 2.0 * s * t * cx + t * t * x1 +		y := s * s * y0 + 2.0 * s * t * cy + t * t * y1 +		xi := 
int(x + 0.5) +		yi := int(y + 0.5) +		// bmp.plot(xi, yi, c) +		bmp.line(x_old, y_old, xi, yi, c) +		x_old = xi +		y_old = yi +		t += division +	} +} + +/****************************************************************************** +* +* TTF Query functions +* +******************************************************************************/ +pub fn (mut bmp BitMap) get_chars_bbox(in_string string) []int { +	mut res := []int{} +	mut w := 0 + +	mut space_cw, _ := bmp.tf.get_horizontal_metrics(u16(` `)) +	div_space_cw := int((f32(space_cw) * bmp.space_mult) * bmp.scale) +	space_cw = int(space_cw * bmp.scale) + +	bmp.tf.reset_kern() + +	mut i := 0 +	for i < in_string.len { +		mut char := u16(in_string[i]) + +		// draw the space +		if int(char) == 32 { +			w += int(space_cw * bmp.space_cw) +			i++ +			continue +		} +		// manage unicode chars like latin greek etc +		c_len := ((0xe5000000 >> ((char >> 3) & 0x1e)) & 3) + 1 +		if c_len > 1 { +			tmp_char := utf8.get_uchar(in_string, i) +			// dprintln("tmp_char: ${tmp_char.hex()}") +			char = u16(tmp_char) +		} + +		c_index := bmp.tf.map_code(int(char)) +		// Glyph not found +		if c_index == 0 { +			w += int(space_cw * bmp.space_cw) +			i += c_len +			continue +		} + +		ax, ay := bmp.tf.next_kern(c_index) +		// dprintln("char_index: $c_index ax: $ax ay: $ay") + +		// cw, lsb := bmp.tf.get_horizontal_metrics(u16(char)) +		// dprintln("metrics: [${u16(char):c}] cw:$cw lsb:$lsb") + +		//----- Calc Glyph transformations ----- +		mut x0 := w + int(ax * bmp.scale) +		mut y0 := 0 + int(ay * bmp.scale) + +		p := Point{x0, y0, false} +		x1, y1 := bmp.trf_txt(p) +		// init ch_matrix +		bmp.ch_matrix[0] = bmp.tr_matrix[0] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[1] = bmp.tr_matrix[1] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[3] = bmp.tr_matrix[3] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[4] = bmp.tr_matrix[4] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[6] = int(x1) +		bmp.ch_matrix[7] = int(y1) + +		// x_min, x_max, y_min, y_max 
:= bmp.tf.read_glyph_dim(c_index) +		x_min, x_max, _, _ := bmp.tf.read_glyph_dim(c_index) +		//----------------- + +		width := int((mu.abs(x_max + x_min) + ax) * bmp.scale) +		// width := int((cw+ax) * bmp.scale) +		w += width + div_space_cw +		h := int(mu.abs(int(bmp.tf.y_max - bmp.tf.y_min)) * bmp.scale) +		res << w +		res << h + +		i += c_len +	} +	return res +} + +pub fn (mut bmp BitMap) get_bbox(in_string string) (int, int) { +	mut w := 0 + +	mut space_cw, _ := bmp.tf.get_horizontal_metrics(u16(` `)) +	div_space_cw := int((f32(space_cw) * bmp.space_mult) * bmp.scale) +	space_cw = int(space_cw * bmp.scale) + +	bmp.tf.reset_kern() + +	mut i := 0 +	for i < in_string.len { +		mut char := u16(in_string[i]) + +		// draw the space +		if int(char) == 32 { +			w += int(space_cw * bmp.space_cw) +			i++ +			continue +		} +		// manage unicode chars like latin greek etc +		c_len := ((0xe5000000 >> ((char >> 3) & 0x1e)) & 3) + 1 +		if c_len > 1 { +			tmp_char := utf8.get_uchar(in_string, i) +			// dprintln("tmp_char: ${tmp_char.hex()}") +			char = u16(tmp_char) +		} + +		c_index := bmp.tf.map_code(int(char)) +		// Glyph not found +		if c_index == 0 { +			w += int(space_cw * bmp.space_cw) +			i += c_len +			continue +		} +		ax, ay := bmp.tf.next_kern(c_index) +		// dprintln("char_index: $c_index ax: $ax ay: $ay") + +		// cw, lsb := bmp.tf.get_horizontal_metrics(u16(char)) +		// dprintln("metrics: [${u16(char):c}] cw:$cw lsb:$lsb") + +		//----- Calc Glyph transformations ----- +		mut x0 := w + int(ax * bmp.scale) +		mut y0 := 0 + int(ay * bmp.scale) + +		p := Point{x0, y0, false} +		x1, y1 := bmp.trf_txt(p) +		// init ch_matrix +		bmp.ch_matrix[0] = bmp.tr_matrix[0] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[1] = bmp.tr_matrix[1] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[3] = bmp.tr_matrix[3] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[4] = bmp.tr_matrix[4] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[6] = int(x1) +		bmp.ch_matrix[7] = int(y1) + +		x_min, x_max, _, _ := 
bmp.tf.read_glyph_dim(c_index) +		// x_min := 1 +		// x_max := 2 +		//----------------- + +		width := int((mu.abs(x_max + x_min) + ax) * bmp.scale) +		// width := int((cw+ax) * bmp.scale) +		w += width + div_space_cw + +		i += c_len +	} + +	// dprintln("y_min: $bmp.tf.y_min y_max: $bmp.tf.y_max res: ${int((bmp.tf.y_max - bmp.tf.y_min)*buf.scale)} width: ${int( (cw) * buf.scale)}") +	// buf.box(0,y_base - int((bmp.tf.y_min)*buf.scale), int( (x_max) * buf.scale), y_base-int((bmp.tf.y_max)*buf.scale), u32(0xFF00_0000) ) +	return w, int(mu.abs(int(bmp.tf.y_max - bmp.tf.y_min)) * bmp.scale) +} + +/****************************************************************************** +* +* TTF draw glyph +* +******************************************************************************/ +fn (mut bmp BitMap) draw_notdef_glyph(in_x int, in_w int) { +	mut p := Point{in_x, 0, false} +	x1, y1 := bmp.trf_txt(p) +	// init ch_matrix +	bmp.ch_matrix[0] = bmp.tr_matrix[0] * bmp.scale * bmp.scale_x +	bmp.ch_matrix[1] = bmp.tr_matrix[1] * bmp.scale * bmp.scale_x +	bmp.ch_matrix[3] = bmp.tr_matrix[3] * -bmp.scale * bmp.scale_y +	bmp.ch_matrix[4] = bmp.tr_matrix[4] * -bmp.scale * bmp.scale_y +	bmp.ch_matrix[6] = int(x1) +	bmp.ch_matrix[7] = int(y1) +	x, y := bmp.trf_ch(p) + +	y_h := mu.abs(bmp.tf.y_max - bmp.tf.y_min) * bmp.scale * 0.5 + +	bmp.box(int(x), int(y), int(x - in_w), int(y - y_h), bmp.color) +	bmp.line(int(x), int(y), int(x - in_w), int(y - y_h), bmp.color) +	bmp.line(int(x - in_w), int(y), int(x), int(y - y_h), bmp.color) +} + +pub fn (mut bmp BitMap) draw_text(in_string string) (int, int) { +	mut w := 0 + +	mut space_cw, _ := bmp.tf.get_horizontal_metrics(u16(` `)) +	div_space_cw := int((f32(space_cw) * bmp.space_mult) * bmp.scale) +	space_cw = int(space_cw * bmp.scale) + +	bmp.tf.reset_kern() + +	mut i := 0 +	for i < in_string.len { +		mut char := u16(in_string[i]) + +		// draw the space +		if int(char) == 32 { +			w += int(space_cw * bmp.space_cw) +			i++ +			continue +		} +		
// manage unicode chars like latin greek etc +		c_len := ((0xe5000000 >> ((char >> 3) & 0x1e)) & 3) + 1 +		if c_len > 1 { +			tmp_char := utf8.get_uchar(in_string, i) +			// dprintln("tmp_char: ${tmp_char.hex()}") +			char = u16(tmp_char) +		} + +		c_index := bmp.tf.map_code(int(char)) +		// Glyph not found +		if c_index == 0 { +			bmp.draw_notdef_glyph(w, int(space_cw * bmp.space_cw)) +			w += int(space_cw * bmp.space_cw) +			i += c_len +			continue +		} + +		ax, ay := bmp.tf.next_kern(c_index) +		// dprintln("char_index: $c_index ax: $ax ay: $ay") + +		cw, _ := bmp.tf.get_horizontal_metrics(u16(char)) +		// cw, lsb := bmp.tf.get_horizontal_metrics(u16(char)) +		// dprintln("metrics: [${u16(char):c}] cw:$cw lsb:$lsb") + +		//----- Draw_Glyph transformations ----- +		mut x0 := w + int(ax * bmp.scale) +		mut y0 := 0 + int(ay * bmp.scale) + +		p := Point{x0, y0, false} +		x1, y1 := bmp.trf_txt(p) +		// init ch_matrix +		bmp.ch_matrix[0] = bmp.tr_matrix[0] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[1] = bmp.tr_matrix[1] * bmp.scale * bmp.scale_x +		bmp.ch_matrix[3] = bmp.tr_matrix[3] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[4] = bmp.tr_matrix[4] * -bmp.scale * bmp.scale_y +		bmp.ch_matrix[6] = int(x1) +		bmp.ch_matrix[7] = int(y1) + +		x_min, x_max := bmp.draw_glyph(c_index) +		// x_min := 1 +		// x_max := 2 +		//----------------- + +		mut width := int((mu.abs(x_max + x_min) + ax) * bmp.scale) +		if bmp.use_font_metrics { +			width = int((cw + ax) * bmp.scale) +		} +		w += width + div_space_cw +		i += c_len +	} + +	// dprintln("y_min: $bmp.tf.y_min y_max: $bmp.tf.y_max res: ${int((bmp.tf.y_max - bmp.tf.y_min)*buf.scale)} width: ${int( (cw) * buf.scale)}") +	// buf.box(0,y_base - int((bmp.tf.y_min)*buf.scale), int( (x_max) * buf.scale), y_base-int((bmp.tf.y_max)*buf.scale), u32(0xFF00_0000) ) +	return w, int(mu.abs(int(bmp.tf.y_max - bmp.tf.y_min)) * bmp.scale) +} + +pub fn (mut bmp BitMap) draw_glyph(index u16) (int, int) { +	glyph := bmp.tf.read_glyph(index) + +	
if !glyph.valid_glyph { +		return 0, 0 +	} + +	if bmp.style == .filled || bmp.style == .raw { +		bmp.clear_filler() +	} + +	mut s := 0 // status +	mut c := 0 // contours count +	mut contour_start := 0 +	mut x0 := 0 +	mut y0 := 0 +	color := bmp.color // u32(0xFFFF_FF00) // RGBA white +	// color1            := u32(0xFF00_0000) // RGBA red +	// color2            := u32(0x00FF_0000) // RGBA green + +	mut sp_x := 0 +	mut sp_y := 0 +	mut point := Point{} + +	for count, point_raw in glyph.points { +		// dprintln("count: $count, state: $s pl:$glyph.points.len") +		point.x = point_raw.x +		point.y = point_raw.y + +		point.x, point.y = bmp.trf_ch(point) +		point.on_curve = point_raw.on_curve + +		if s == 0 { +			x0 = point.x +			y0 = point.y +			sp_x = x0 +			sp_y = y0 +			s = 1 // next state +			continue +		} else if s == 1 { +			if point.on_curve { +				bmp.line(x0, y0, point.x, point.y, color) +				// bmp.aline(x0, y0, point.x, point.y, u32(0xFFFF0000)) +				x0 = point.x +				y0 = point.y +			} else { +				s = 2 +			} +		} else { +			// dprintln("s==2") +			mut prev := glyph.points[count - 1] +			prev.x, prev.y = bmp.trf_ch(prev) +			if point.on_curve { +				// dprintln("HERE1") +				// ctx.quadraticCurveTo(prev.x + x, prev.y + y,point.x + x, point.y + y); +				// bmp.line(x0, y0, point.x + in_x, point.y + in_y, color1) +				// bmp.quadratic(x0, y0, point.x + in_x, point.y + in_y, prev.x + in_x, prev.y + in_y, u32(0xa0a00000)) +				bmp.quadratic(x0, y0, point.x, point.y, prev.x, prev.y, color) +				x0 = point.x +				y0 = point.y +				s = 1 +			} else { +				// dprintln("HERE2") +				// ctx.quadraticCurveTo(prev.x + x, prev.y + y, +				//            (prev.x + point.x) / 2 + x, +				//            (prev.y + point.y) / 2 + y); + +				// bmp.line(x0, y0, (prev.x + point.x)/2, (prev.y + point.y)/2, color2) +				// bmp.quadratic(x0, y0, (prev.x + point.x)/2, (prev.y + point.y)/2, prev.x, prev.y, color2) +				bmp.quadratic(x0, y0, (prev.x + point.x) / 2, (prev.y + point.y) / 2, +	
				prev.x, prev.y, color) +				x0 = (prev.x + point.x) / 2 +				y0 = (prev.y + point.y) / 2 +			} +		} + +		if count == glyph.contour_ends[c] { +			// dprintln("count == glyph.contour_ends[count]") +			if s == 2 { // final point was off-curve. connect to start + +				mut start_point := glyph.points[contour_start] +				start_point.x, start_point.y = bmp.trf_ch(start_point) +				if point.on_curve { +					// ctx.quadraticCurveTo(prev.x + x, prev.y + y, +					// point.x + x, point.y + y); +					// bmp.line(x0, y0, start_point.x + in_x, start_point.y + in_y, u32(0x00FF0000)) + +					//	start_point.x + in_x, start_point.y + in_y, u32(0xFF00FF00)) +					bmp.quadratic(x0, y0, start_point.x, start_point.y, start_point.x, +						start_point.y, color) +				} else { +					// ctx.quadraticCurveTo(prev.x + x, prev.y + y, +					//        (prev.x + point.x) / 2 + x, +					//        (prev.y + point.y) / 2 + y); + +					// bmp.line(x0, y0, start_point.x, start_point.y, u32(0x00FF0000) +					// u32(0xFF000000)) +					bmp.quadratic(x0, y0, start_point.x, start_point.y, (point.x + start_point.x) / 2, +						(point.y + start_point.y) / 2, color) +				} +			} else { +				// last point not in a curve +				// bmp.line(point.x, point.y, sp_x, sp_y, u32(0x00FF0000)) +				bmp.line(point.x, point.y, sp_x, sp_y, color) +			} +			contour_start = count + 1 +			s = 0 +			c++ +		} +	} + +	if bmp.style == .filled || bmp.style == .raw { +		bmp.exec_filler() +	} +	x_min := glyph.x_min +	x_max := glyph.x_max +	return x_min, x_max + +	// return glyph.x_min, glyph.x_max +} diff --git a/v_windows/v/old/vlib/x/ttf/render_sokol_cpu.v b/v_windows/v/old/vlib/x/ttf/render_sokol_cpu.v new file mode 100644 index 0000000..f0e60eb --- /dev/null +++ b/v_windows/v/old/vlib/x/ttf/render_sokol_cpu.v @@ -0,0 +1,210 @@ +module ttf + +/********************************************************************** +* +* BMP render module utility functions +* +* Copyright (c) 2021 Dario Deledda. All rights reserved. 
/**********************************************************************
*
* TTF font to Sokol-texture renderer
*
* Copyright (c) 2021 Dario Deledda. All rights reserved.
* Use of this source code is governed by an MIT license
* that can be found in the LICENSE file.
**********************************************************************/
import math
import gg
import sokol.sgl

// TTF_render_Sokol renders text on a CPU BitMap and uploads the result
// to the GPU as a sokol image.
pub struct TTF_render_Sokol {
pub mut:
	bmp &BitMap // Base bitmap render
	// rendering fields
	sg_img       C.sg_image // sokol image
	scale_reduct f32 = 2.0 // scale of the cpu texture for filtering
	device_dpi   int = 72 // device DPI
}

/******************************************************************************
*
* Render functions
*
******************************************************************************/
// format_texture converts the bitmap into the texture pixel format.
pub fn (mut tf_skl TTF_render_Sokol) format_texture() {
	tf_skl.bmp.format_texture()
}

// create_text renders `in_txt` at `in_font_size` points into the bitmap,
// growing the RAM buffer only when the required size exceeds the current one.
pub fn (mut tf_skl TTF_render_Sokol) create_text(in_txt string, in_font_size f32) {
	scale_reduct := tf_skl.scale_reduct
	device_dpi := tf_skl.device_dpi
	font_size := in_font_size //* scale_reduct

	// Formula: (font_size * device dpi) / (72dpi * em_unit)
	scale := f32(font_size * device_dpi) / f32(72 * tf_skl.bmp.tf.units_per_em)
	// dprintln("Scale: $scale")

	tf_skl.bmp.scale = scale * scale_reduct
	w, h := tf_skl.bmp.get_bbox(in_txt)
	tf_skl.bmp.width = int(w)
	tf_skl.bmp.height = int((h + 8))
	sz := tf_skl.bmp.width * tf_skl.bmp.height * tf_skl.bmp.bp

	// RAM buffer: grow-only reallocation.
	if sz > tf_skl.bmp.buf_size {
		// BUGFIX: free only a buffer that was actually allocated before
		// (the old check `sz > 0` freed an uninitialized pointer on the
		// very first allocation, when buf_size was still 0).
		if tf_skl.bmp.buf_size > 0 {
			unsafe { free(tf_skl.bmp.buf) }
		}
		dprintln('create_text Alloc: $sz bytes')
		tf_skl.bmp.buf = unsafe { malloc_noscan(sz) }
		tf_skl.bmp.buf_size = sz
	}

	tf_skl.bmp.init_filler()

	// draw the text
	mut y_base := int((tf_skl.bmp.tf.y_max - tf_skl.bmp.tf.y_min) * tf_skl.bmp.scale)
	tf_skl.bmp.set_pos(0, y_base)
	tf_skl.bmp.clear()
	tf_skl.bmp.draw_text(in_txt)
	tf_skl.format_texture()
}

// create_text_block renders `in_txt` word-wrapped inside a `in_w` x `in_h`
// block at `in_font_size` points.
pub fn (mut tf_skl TTF_render_Sokol) create_text_block(in_txt string, in_w int, in_h int, in_font_size f32) {
	scale_reduct := tf_skl.scale_reduct
	device_dpi := tf_skl.device_dpi
	font_size := in_font_size //* scale_reduct
	// Formula: (font_size * device dpi) / (72dpi * em_unit)
	scale := f32(font_size * device_dpi) / f32(72 * tf_skl.bmp.tf.units_per_em)
	// dprintln("Scale: $scale")

	tf_skl.bmp.scale = scale * scale_reduct
	w := in_w
	h := in_h
	tf_skl.bmp.width = int(w * scale_reduct + 0.5)
	tf_skl.bmp.height = int((h + 2) * scale_reduct + 0.5)
	sz := tf_skl.bmp.width * tf_skl.bmp.height * tf_skl.bmp.bp

	// RAM buffer: grow-only reallocation.
	if sz > tf_skl.bmp.buf_size {
		// BUGFIX: same guard as in create_text — only free a previously
		// allocated buffer.
		if tf_skl.bmp.buf_size > 0 {
			unsafe { free(tf_skl.bmp.buf) }
		}
		dprintln('Alloc: $sz bytes')
		tf_skl.bmp.buf = unsafe { malloc_noscan(sz) }
		tf_skl.bmp.buf_size = sz
	}

	tf_skl.bmp.init_filler()

	// draw the text
	mut y_base := int((tf_skl.bmp.tf.y_max - tf_skl.bmp.tf.y_min) * tf_skl.bmp.scale)
	tf_skl.bmp.set_pos(0, y_base)
	tf_skl.bmp.clear()

	tf_skl.bmp.draw_text_block(in_txt, x: 0, y: 0, w: w, h: h)
	tf_skl.format_texture()
}

/******************************************************************************
*
* Sokol Render functions
*
******************************************************************************/
// create_texture uploads the current bitmap as an immutable sokol image.
pub fn (mut tf_skl TTF_render_Sokol) create_texture() {
	w := tf_skl.bmp.width
	h := tf_skl.bmp.height
	sz := tf_skl.bmp.width * tf_skl.bmp.height * tf_skl.bmp.bp
	mut img_desc := C.sg_image_desc{
		width: w
		height: h
		num_mipmaps: 0
		min_filter: .linear
		mag_filter: .linear
		// usage: .dynamic
		wrap_u: .clamp_to_edge
		wrap_v: .clamp_to_edge
		label: &char(0)
		d3d11_texture: 0
	}
	// comment for dynamic
	img_desc.data.subimage[0][0] = C.sg_range{
		ptr: tf_skl.bmp.buf
		size: size_t(sz)
	}

	simg := C.sg_make_image(&img_desc)
	// free(tf_skl.bmp.buf)  // DONT FREE IF Dynamic
	tf_skl.sg_img = simg
}

// destroy_texture releases the sokol image.
pub fn (tf_skl TTF_render_Sokol) destroy_texture() {
	C.sg_destroy_image(tf_skl.sg_img)
}

// update_text_texture re-uploads the bitmap pixels.
// Use only if the image was created with usage: .dynamic.
pub fn (mut tf_skl TTF_render_Sokol) update_text_texture() {
	sz := tf_skl.bmp.width * tf_skl.bmp.height * tf_skl.bmp.bp
	mut tmp_sbc := C.sg_image_data{}
	tmp_sbc.subimage[0][0] = C.sg_range{
		ptr: tf_skl.bmp.buf
		size: size_t(sz)
	}
	C.sg_update_image(tf_skl.sg_img, &tmp_sbc)
}

// draw_text_bmp draws the text texture at (x, y), rotated by bmp.angle,
// as a single textured quad.
pub fn (tf_skl TTF_render_Sokol) draw_text_bmp(ctx &gg.Context, x f32, y f32) {
	sgl.push_matrix()

	// the CPU bitmap is oversampled by scale_reduct; shrink it back on screen
	width := tf_skl.bmp.width / (tf_skl.scale_reduct)
	height := tf_skl.bmp.height / (tf_skl.scale_reduct)

	u0 := f32(0.0)
	v0 := f32(0.0)
	u1 := f32(1.0)
	v1 := f32(1.0)
	x0 := f32(0)
	y0 := f32(0)
	x1 := f32(width) * ctx.scale
	y1 := f32(height) * ctx.scale

	// column-major 4x4 rotation + translation matrix
	ca := f32(math.cos(tf_skl.bmp.angle))
	sa := f32(math.sin(tf_skl.bmp.angle))
	m := [
		f32(ca),
		-sa,
		0,
		0,
		sa,
		ca,
		0,
		0,
		0,
		0,
		1,
		0,
		x * ctx.scale,
		y * ctx.scale,
		0,
		1,
	]
	sgl.mult_matrix(m)
	//
	sgl.load_pipeline(ctx.timage_pip)
	sgl.enable_texture()
	sgl.texture(tf_skl.sg_img)
	sgl.begin_quads()
	sgl.c4b(255, 255, 255, 255)
	sgl.v2f_t2f(x0, y0, u0, v0)
	sgl.v2f_t2f(x1, y0, u1, v0)
	sgl.v2f_t2f(x1, y1, u1, v1)
	sgl.v2f_t2f(x0, y1, u0, v1)
	sgl.end()
	sgl.disable_texture()
	sgl.pop_matrix()
}

// ----- file: text_block.v -----
module ttf

/**********************************************************************
*
* BMP render module utility functions
*
* Copyright (c) 2021 Dario Deledda. All rights reserved.
* Use of this source code is governed by an MIT license
* that can be found in the LICENSE file.
**********************************************************************/
/**********************************************************************
* Text-block layout for the BMP renderer.
**********************************************************************/
// Text_block describes the rectangle a piece of text is laid out in.
pub struct Text_block {
	x         int  // x position of the left high corner
	y         int  // y position of the left high corner
	w         int  // width of the text block
	h         int  // height of the text block
	cut_lines bool = true // force to cut the line if the length is over the text block width
}

// get_justify_space_cw returns the extra width (as a fraction of the space
// character width) to add to each space so that `txt`, currently `w` wide,
// fills a block `block_w` wide. Returns 0 when the text has no spaces.
fn (mut bmp BitMap) get_justify_space_cw(txt string, w int, block_w int, space_cw int) f32 {
	spaces := txt.count(' ')
	if spaces < 1 {
		return 0
	}
	return f32(block_w - w) / f32(spaces) / f32(space_cw)
}

// draw_text_block renders `text` inside `block`, honoring the bitmap's
// alignment and justification settings and wrapping over-long lines on
// word boundaries when block.cut_lines is enabled.
pub fn (mut bmp BitMap) draw_text_block(text string, block Text_block) {
	x := block.x
	mut y := block.y
	y_base := int((bmp.tf.y_max - bmp.tf.y_min) * bmp.scale)

	// bmp.box(x, y, x + block.w, y + block.h, u32(0xFF00_0000))

	// width of the space character, scaled to the current font size
	mut space_cw, _ := bmp.tf.get_horizontal_metrics(u16(` `))
	space_cw = int(space_cw * bmp.scale)

	old_space_cw := bmp.space_cw

	// fraction of the leftover width placed on the left side of a line
	align_mul := if bmp.align == .right {
		f32(1)
	} else if bmp.align == .center {
		f32(0.5)
	} else {
		f32(0) // .left
	}

	for line in text.split_into_lines() {
		bmp.space_cw = old_space_cw
		mut w, _ := bmp.get_bbox(line)
		if w <= block.w || !block.cut_lines {
			// the whole line fits (or cutting is disabled): draw it as-is
			left_offset := int((block.w - w) * align_mul)
			if bmp.justify && (f32(w) / f32(block.w)) >= bmp.justify_fill_ratio {
				bmp.space_cw = old_space_cw + bmp.get_justify_space_cw(line, w, block.w, space_cw)
			}
			bmp.set_pos(x + left_offset, y + y_base)
			bmp.draw_text(line)
			y += y_base
		} else {
			// line too wide: repeatedly take the longest word prefix that fits
			mut words := line.split(' ')
			mut count := words.len
			for count > 0 {
				candidate := words[0..count].join(' ')
				if candidate.len < 1 {
					break
				}

				bmp.space_cw = old_space_cw
				w, _ = bmp.get_bbox(candidate)
				if w <= block.w {
					mut left_offset := int((block.w - w) * align_mul)
					if bmp.justify && (f32(w) / f32(block.w)) >= bmp.justify_fill_ratio {
						// re-measure with zero space width, then spread the
						// slack over the spaces
						bmp.space_cw = 0.0
						w, _ = bmp.get_bbox(candidate)
						left_offset = int((block.w - w) * align_mul)
						bmp.space_cw = bmp.get_justify_space_cw(candidate, w, block.w, space_cw)
					} else {
						bmp.space_cw = old_space_cw
					}
					bmp.set_pos(x + left_offset, y + y_base)
					bmp.draw_text(candidate)
					y += y_base
					// continue wrapping the remaining words
					words = words[count..]
					count = words.len
				} else {
					count--
				}
			}
		}
	}

	bmp.space_cw = old_space_cw
}

// ----- file: ttf.v -----
module ttf

/**********************************************************************
*
* TrueTypeFont reader V implementation
*
* Copyright (c) 2021 Dario Deledda. All rights reserved.
* Use of this source code is governed by an MIT license
* that can be found in the LICENSE file.
*
* Note:
* - inspired by: http://stevehanov.ca/blog/?id=143
*
* TODO:
* - check for unicode > 0xFFFF if supported
* - evaluate use a buffer for the points in the glyph
**********************************************************************/
import strings

/******************************************************************************
*
* CMAP structs
*
******************************************************************************/
// Segment is one character range of a cmap format-4 subtable.
struct Segment {
mut:
	id_range_offset u32
	start_code      u16
	end_code        u16
	id_delta        u16
}

// TrueTypeCmap is a decoded cmap subtable (format 0 or 4) with a
// charcode -> glyph index cache.
struct TrueTypeCmap {
mut:
	format   int
	cache    []int = []int{len: 65536, init: -1} // for now we allocate 2^16 charcode
	segments []Segment
	arr      []int
}

/******************************************************************************
*
* TTF_File structs
*
******************************************************************************/
// TTF_File holds the raw font bytes plus the decoded head/name/cmap/hhea/kern
// table data and a per-glyph cache.
pub struct TTF_File {
pub mut:
	buf                     []byte
	pos                     u32 // read cursor into buf
	length                  u16
	scalar_type             u32
	search_range            u16
	entry_selector          u16
	range_shift             u16
	tables                  map[string]Offset_Table
	version                 f32
	font_revision           f32
	checksum_adjustment     u32
	magic_number            u32
	flags                   u16
	units_per_em            u16
	created                 u64
	modified                u64
	x_min                   f32
	y_min                   f32
	x_max                   f32
	y_max                   f32
	mac_style               u16
	lowest_rec_ppem         u16
	font_direction_hint     i16
	index_to_loc_format     i16 // 0 = short (u16/2) loca entries, 1 = long (u32)
	glyph_data_format       i16
	font_family             string
	font_sub_family         string
	full_name               string
	postscript_name         string
	cmaps                   []TrueTypeCmap
	ascent                  i16
	descent                 i16
	line_gap                i16
	advance_width_max       u16
	min_left_side_bearing   i16
	min_right_side_bearing  i16
	x_max_extent            i16
	caret_slope_rise        i16
	caret_slope_run         i16
	caret_offset            i16
	metric_data_format      i16
	num_of_long_hor_metrics u16
	kern                    []Kern0Table
	// cache
	glyph_cache map[int]Glyph
}

// init parses all the tables needed for rendering from buf.
pub fn (mut tf TTF_File) init() {
	tf.read_offset_tables()
	tf.read_head_table()
	// dprintln(tf.get_info_string())
	tf.read_name_table()
	tf.read_cmap_table()
	tf.read_hhea_table()
	tf.read_kern_table()
	tf.length = tf.glyph_count()
	dprintln('Number of symbols: $tf.length')
	dprintln('*****************************')
}

/******************************************************************************
*
* TTF_File Glyph Structs
*
******************************************************************************/
// Point is one outline point in font units.
pub struct Point {
pub mut:
	x        int
	y        int
	on_curve bool
}

struct Gylph_Component {
mut:
	points []Point
}

// type of glyph
const (
	g_type_simple  = u16(1) // simple type
	g_type_complex = u16(2) // compound type
)

// Glyph is a decoded outline: contours, points and bounding box.
pub struct Glyph {
pub mut:
	g_type             u16 = ttf.g_type_simple
	contour_ends       []u16
	number_of_contours i16
	points             []Point
	x_min              i16
	x_max              i16
	y_min              i16
	y_max              i16
	valid_glyph        bool
	components         []Component
}

/******************************************************************************
*
* TTF_File metrics and glyph
*
******************************************************************************/
// get_horizontal_metrics returns (advance_width, left_side_bearing) for the
// glyph, reading the 'hmtx' table without disturbing the read cursor.
pub fn (mut tf TTF_File) get_horizontal_metrics(glyph_index u16) (int, int) {
	assert 'hmtx' in tf.tables
	old_pos := tf.pos
	mut offset := tf.tables['hmtx'].offset

	mut advance_width := 0
	mut left_side_bearing := 0
	if glyph_index < tf.num_of_long_hor_metrics {
		offset += glyph_index * 4
		tf.pos = offset
		advance_width = tf.get_u16()
		left_side_bearing = tf.get_i16()
		// dprintln("Found h_metric aw: $advance_width lsb: $left_side_bearing")
	} else {
		// monospaced tail: reuse the last advance width, then read the lsb
		// from the trailing leftSideBearing array
		tf.pos = offset + (tf.num_of_long_hor_metrics - 1) * 4
		advance_width = tf.get_u16()
		tf.pos = offset + tf.num_of_long_hor_metrics * 4 +
			2 * (glyph_index - tf.num_of_long_hor_metrics)
		left_side_bearing = tf.get_fword()
	}
	tf.pos = old_pos
	return advance_width, left_side_bearing
}

// get_glyph_offset returns the absolute file offset of the glyph's data in
// the 'glyf' table, or 0 when the glyph has no outline (e.g. space).
fn (mut tf TTF_File) get_glyph_offset(index u32) u32 {
	// check if needed tables exists
	assert 'loca' in tf.tables
	assert 'glyf' in tf.tables
	mut old_pos := tf.pos

	table := tf.tables['loca']
	mut offset := u32(0)
	mut next := u32(0)
	if tf.index_to_loc_format == 1 {
		tf.pos = table.offset + (index << 2)
		offset = tf.get_u32()
		next = tf.get_u32()
	} else {
		// short format stores offset/2 — widen to u32 BEFORE shifting,
		// otherwise offsets >= 0x10000 are truncated in u16 arithmetic
		tf.pos = table.offset + (index << 1)
		offset = u32(tf.get_u16()) << 1
		next = u32(tf.get_u16()) << 1
	}

	if offset == next {
		// indicates glyph has no outline( eg space)
		return 0
	}
	// dprintln("Offset for glyph index $index is $offset")
	tf.pos = old_pos
	return offset + tf.tables['glyf'].offset
}

// glyph_count reads the glyph count from the 'maxp' table.
fn (mut tf TTF_File) glyph_count() u16 {
	assert 'maxp' in tf.tables
	old_pos := tf.pos
	tf.pos = tf.tables['maxp'].offset + 4
	count := tf.get_u16()
	tf.pos = old_pos
	return count
}

// read_glyph_dim returns (x_min, x_max, y_min, y_max) of a glyph without
// decoding its outline. Returns zeros when the glyph has no data.
pub fn (mut tf TTF_File) read_glyph_dim(index u16) (int, int, int, int) {
	offset := tf.get_glyph_offset(index)
	// dprintln("offset: $offset")
	if offset == 0 || offset >= tf.tables['glyf'].offset + tf.tables['glyf'].length {
		dprintln('No glyph found!')
		return 0, 0, 0, 0
	}

	assert offset >= tf.tables['glyf'].offset
	assert offset < tf.tables['glyf'].offset + tf.tables['glyf'].length

	tf.pos = offset
	// dprintln("file seek read_glyph: $tf.pos")

	// number_of_contours
	_ := tf.get_i16()
	x_min := tf.get_fword()
	y_min := tf.get_fword()
	x_max := tf.get_fword()
	y_max := tf.get_fword()

	return x_min, x_max, y_min, y_max
}

// read_glyph decodes (and caches) the full outline of a glyph.
// An invalid/missing glyph yields a zero Glyph with valid_glyph == false.
pub fn (mut tf TTF_File) read_glyph(index u16) Glyph {
	index_int := int(index)
	if index_int in tf.glyph_cache {
		// dprintln("Found glyph: ${index}")
		return tf.glyph_cache[index_int]
	}
	// dprintln("Create glyph: ${index}")

	offset := tf.get_glyph_offset(index)
	// dprintln("offset: $offset")
	if offset == 0 || offset >= tf.tables['glyf'].offset + tf.tables['glyf'].length {
		dprintln('No glyph found!')
		return Glyph{}
	}

	assert offset >= tf.tables['glyf'].offset
	assert offset < tf.tables['glyf'].offset + tf.tables['glyf'].length

	tf.pos = offset
	// dprintln("file seek read_glyph: $tf.pos")

	// NOTE: the fields are read one by one on purpose: initializing the
	// struct literal with tf.get_* calls gave an undefined evaluation order.
	mut tmp_glyph := Glyph{}
	tmp_glyph.number_of_contours = tf.get_i16()
	tmp_glyph.x_min = tf.get_fword()
	tmp_glyph.y_min = tf.get_fword()
	tmp_glyph.x_max = tf.get_fword()
	tmp_glyph.y_max = tf.get_fword()

	// dprintln("file seek after read_glyph: $tf.pos")

	assert tmp_glyph.number_of_contours >= -1

	if tmp_glyph.number_of_contours == -1 {
		// dprintln("read_compound_glyph")
		tf.read_compound_glyph(mut tmp_glyph)
	} else {
		// dprintln("read_simple_glyph")
		tf.read_simple_glyph(mut tmp_glyph)
	}

	tf.glyph_cache[index_int] = tmp_glyph
	return tmp_glyph
}

// simple-glyph point flags
const (
	tfk_on_curve  = 1
	tfk_x_is_byte = 2
	tfk_y_is_byte = 4
	tfk_repeat    = 8
	tfk_x_delta   = 16
	tfk_y_delta   = 32
)

// read_simple_glyph decodes the contours/points of a simple glyph into
// in_glyph (coordinates are delta-encoded in the file).
fn (mut tf TTF_File) read_simple_glyph(mut in_glyph Glyph) {
	if in_glyph.number_of_contours == 0 {
		return
	}

	for _ in 0 .. in_glyph.number_of_contours {
		in_glyph.contour_ends << tf.get_u16()
	}

	// skip over instructions
	tf.pos = tf.get_u16() + tf.pos

	// the number of points is one past the largest contour-end index
	mut num_points := 0
	for ce in in_glyph.contour_ends {
		if ce > num_points {
			num_points = ce
		}
	}
	num_points++

	// read the per-point flags, expanding the repeat runs
	mut i := 0
	mut flags := []byte{}
	for i < num_points {
		flag := tf.get_u8()
		flags << flag
		in_glyph.points << Point{
			x: 0
			y: 0
			on_curve: (flag & ttf.tfk_on_curve) > 0
		}
		if (flag & ttf.tfk_repeat) > 0 {
			mut repeat_count := tf.get_u8()
			assert repeat_count > 0
			i += repeat_count
			for repeat_count > 0 {
				flags << flag
				in_glyph.points << Point{
					x: 0
					y: 0
					on_curve: (flag & ttf.tfk_on_curve) > 0
				}
				repeat_count--
			}
		}
		i++
	}

	// read coords x (cumulative deltas)
	mut value := 0
	for i_x in 0 .. num_points {
		flag_x := flags[i_x]
		if (flag_x & ttf.tfk_x_is_byte) > 0 {
			if (flag_x & ttf.tfk_x_delta) > 0 {
				value += tf.get_u8()
			} else {
				value -= tf.get_u8()
			}
		} else if (~flag_x & ttf.tfk_x_delta) > 0 {
			value += tf.get_i16()
		} else {
			// value is unchanged
		}
		// dprintln("$i_x x: $value")
		in_glyph.points[i_x].x = value
	}

	// read coords y (cumulative deltas)
	value = 0
	for i_y in 0 .. num_points {
		flag_y := flags[i_y]
		if (flag_y & ttf.tfk_y_is_byte) > 0 {
			if (flag_y & ttf.tfk_y_delta) > 0 {
				value += tf.get_u8()
			} else {
				value -= tf.get_u8()
			}
		} else if (~flag_y & ttf.tfk_y_delta) > 0 {
			value += tf.get_i16()
		} else {
			// value is unchanged
		}
		// dprintln("$i_y y: $value")
		in_glyph.points[i_y].y = value
	}

	// ok we have a valid glyph
	in_glyph.valid_glyph = true
}

// compound-glyph component flags
const (
	tfkc_arg_1_and_2_are_words    = 1
	tfkc_args_are_xy_values       = 2
	tfkc_round_xy_to_grid         = 4
	tfkc_we_have_a_scale          = 8
	// reserved                   = 16
	tfkc_more_components          = 32
	tfkc_we_have_an_x_and_y_scale = 64
	tfkc_we_have_a_two_by_two     = 128
	tfkc_we_have_instructions     = 256
	tfkc_use_my_metrics           = 512
	tfkc_overlap_component        = 1024
)

// Component is one sub-glyph reference of a compound glyph, with its
// 2x2 transform + translation stored as [a, b, c, d, e, f].
struct Component {
mut:
	glyph_index      u16
	dest_point_index i16
	src_point_index  i16
	matrix           []f32 = [f32(1.0), 0, 0, 1.0, 0, 0]
}

// read_compound_glyph decodes a compound glyph by recursively reading its
// component glyphs and appending their transformed points to in_glyph.
fn (mut tf TTF_File) read_compound_glyph(mut in_glyph Glyph) {
	in_glyph.g_type = ttf.g_type_complex
	mut component := Component{}
	mut flags := ttf.tfkc_more_components
	for (flags & ttf.tfkc_more_components) > 0 {
		mut arg1 := i16(0)
		mut arg2 := i16(0)

		flags = tf.get_u16()

		component.glyph_index = tf.get_u16()

		if (flags & ttf.tfkc_arg_1_and_2_are_words) > 0 {
			arg1 = tf.get_i16()
			arg2 = tf.get_i16()
		} else {
			arg1 = tf.get_u8()
			arg2 = tf.get_u8()
		}

		if (flags & ttf.tfkc_args_are_xy_values) > 0 {
			component.matrix[4] = arg1
			component.matrix[5] = arg2
		} else {
			component.dest_point_index = arg1
			component.src_point_index = arg2
		}

		if (flags & ttf.tfkc_we_have_a_scale) > 0 {
			component.matrix[0] = tf.get_2dot14()
			component.matrix[3] = component.matrix[0]
		} else if (flags & ttf.tfkc_we_have_an_x_and_y_scale) > 0 {
			component.matrix[0] = tf.get_2dot14()
			component.matrix[3] = tf.get_2dot14()
		} else if (flags & ttf.tfkc_we_have_a_two_by_two) > 0 {
			component.matrix[0] = tf.get_2dot14()
			component.matrix[1] = tf.get_2dot14()
			component.matrix[2] = tf.get_2dot14()
			component.matrix[3] = tf.get_2dot14()
		}
		// dprintln("Read component glyph index ${component.glyph_index}")
		// dprintln("Transform: ${component.matrix}")

		old_pos := tf.pos

		simple_glyph := tf.read_glyph(component.glyph_index)
		if simple_glyph.valid_glyph {
			point_offset := in_glyph.points.len
			for i in 0 .. simple_glyph.contour_ends.len {
				in_glyph.contour_ends << u16(simple_glyph.contour_ends[i] + point_offset)
			}

			for p in simple_glyph.points {
				px := f32(p.x)
				py := f32(p.y)
				// BUGFIX: both axes must be transformed from the ORIGINAL
				// coordinates; the old code reused the already-transformed
				// x when computing y, skewing every compound glyph.
				x := component.matrix[0] * px + component.matrix[1] * py + component.matrix[4]
				y := component.matrix[2] * px + component.matrix[3] * py + component.matrix[5]
				in_glyph.points << Point{
					x: int(x)
					y: int(y)
					on_curve: p.on_curve
				}
			}
		}
		tf.pos = old_pos
	}

	in_glyph.number_of_contours = i16(in_glyph.contour_ends.len)

	if (flags & ttf.tfkc_we_have_instructions) > 0 {
		tf.pos = tf.get_u16() + tf.pos
	}
	// ok we have a valid glyph
	in_glyph.valid_glyph = true
}

/******************************************************************************
*
* TTF_File get functions
*
* All getters read big-endian values at tf.pos and advance the cursor.
*
******************************************************************************/
fn (mut tf TTF_File) get_u8() byte {
	x := tf.buf[tf.pos]
	tf.pos++
	return byte(x)
}

fn (mut tf TTF_File) get_i8() i8 {
	return i8(tf.get_u8())
}

fn (mut tf TTF_File) get_u16() u16 {
	// widen each byte to u16 BEFORE shifting so the high byte is not lost
	x := (u16(tf.buf[tf.pos]) << 8) | u16(tf.buf[tf.pos + 1])
	tf.pos += 2
	return x
}

fn (mut tf TTF_File) get_ufword() u16 {
	return tf.get_u16()
}

fn (mut tf TTF_File) get_i16() i16 {
	return i16(tf.get_u16())
}

fn (mut tf TTF_File) get_fword() i16 {
	return tf.get_i16()
}

fn (mut tf TTF_File) get_u32() u32 {
	x := (u32(tf.buf[tf.pos]) << u32(24)) | (u32(tf.buf[tf.pos + 1]) << u32(16)) | (u32(tf.buf[
		tf.pos + 2]) << u32(8)) | u32(tf.buf[tf.pos + 3])
	tf.pos += 4
	return x
}

fn (mut tf TTF_File) get_i32() int {
	return int(tf.get_u32())
}

// get_2dot14 reads a signed 2.14 fixed-point number.
fn (mut tf TTF_File) get_2dot14() f32 {
	return f32(tf.get_i16()) / f32(i16(1 << 14))
}

// get_fixed reads a signed 16.16 fixed-point number.
fn (mut tf TTF_File) get_fixed() f32 {
	return f32(tf.get_i32() / f32(1 << 16))
}

// get_string reads `length` raw bytes as an ASCII/Latin string.
fn (mut tf TTF_File) get_string(length int) string {
	tmp_pos := u64(tf.pos)
	tf.pos += u32(length)
	return unsafe { tos(&byte(u64(tf.buf.data) + tmp_pos), length) }
}

// get_unicode_string reads `length` bytes of UTF-16BE data and converts
// them to a UTF-8 V string.
fn (mut tf TTF_File) get_unicode_string(length int) string {
	mut tmp_txt := strings.new_builder(length)
	mut real_len := 0

	for _ in 0 .. (length >> 1) {
		c := tf.get_u16()
		c_len := ((0xe5000000 >> ((c >> 3) & 0x1e)) & 3) + 1
		real_len += c_len
		if c_len == 1 {
			tmp_txt.write_b(byte(c & 0xff))
		} else {
			tmp_txt.write_b(byte((c >> 8) & 0xff))
			tmp_txt.write_b(byte(c & 0xff))
		}
		// dprintln("c: ${c:c}|${ byte(c &0xff) :c} c_len: ${c_len} str_len: ${real_len} in_len: ${length}")
	}
	// NOTE(review): tf.pos was already advanced by get_u16 above; this extra
	// advance looks redundant, but the only caller (read_name_table) restores
	// pos right after, so it is kept as-is — TODO confirm before changing.
	tf.pos += u32(real_len)
	res_txt := tmp_txt.str()
	// dprintln("get_unicode_string: ${res_txt}")
	return res_txt
}

// get_date reads a Mac epoch (1904) longdatetime and converts it to a
// unix timestamp.
fn (mut tf TTF_File) get_date() u64 {
	mac_time := (u64(tf.get_u32()) << 32) + u64(tf.get_u32())
	utc_time := mac_time - u64(2082844800)
	return utc_time
}

// calc_checksum sums the table bytes as u32 words (mod 2^32), as required
// by the sfnt table-directory checksum.
fn (mut tf TTF_File) calc_checksum(offset u32, length u32) u32 {
	old_index := tf.pos
	mut sum := u64(0)
	mut nlongs := int((length + 3) >> 2)
	tf.pos = offset
	// dprintln("offs: $offset nlongs: $nlongs")
	for nlongs > 0 {
		sum = sum + u64(tf.get_u32())
		nlongs--
	}
	tf.pos = old_index
	return u32(sum & u64(0xffff_ffff))
}

/******************************************************************************
*
* Offset_Table
*
******************************************************************************/
// Offset_Table is one entry of the sfnt table directory.
struct Offset_Table {
mut:
	checksum u32
	offset   u32
	length   u32
}

// read_offset_tables parses the sfnt header and the table directory,
// verifying each table's checksum (except 'head', whose checksum covers
// the whole file).
fn (mut tf TTF_File) read_offset_tables() {
	dprintln('*** READ TABLES OFFSET ***')
	tf.pos = 0
	tf.scalar_type = tf.get_u32()
	num_tables := tf.get_u16()
	tf.search_range = tf.get_u16()
	tf.entry_selector = tf.get_u16()
	tf.range_shift = tf.get_u16()

	dprintln('scalar_type   : [0x$tf.scalar_type.hex()]')
	dprintln('num tables    : [$num_tables]')
	dprintln('search_range  : [0x$tf.search_range.hex()]')
	dprintln('entry_selector: [0x$tf.entry_selector.hex()]')
	dprintln('range_shift   : [0x$tf.range_shift.hex()]')

	mut i := 0
	for i < num_tables {
		tag := tf.get_string(4)
		tf.tables[tag] = Offset_Table{
			checksum: tf.get_u32()
			offset: tf.get_u32()
			length: tf.get_u32()
		}
		dprintln('Table: [$tag]')
		// dprintln("${tf.tables[tag]}")

		if tag != 'head' {
			assert tf.calc_checksum(tf.tables[tag].offset, tf.tables[tag].length) == tf.tables[tag].checksum
		}
		i++
	}
	dprintln('*** END READ TABLES OFFSET ***')
}

/******************************************************************************
*
* Head_Table
*
******************************************************************************/
// read_head_table parses the 'head' table (global font metadata).
fn (mut tf TTF_File) read_head_table() {
	dprintln('*** READ HEAD TABLE ***')
	tf.pos = tf.tables['head'].offset
	dprintln('Offset: $tf.pos')

	tf.version = tf.get_fixed()
	tf.font_revision = tf.get_fixed()
	tf.checksum_adjustment = tf.get_u32()
	tf.magic_number = tf.get_u32()
	assert tf.magic_number == 0x5f0f3cf5
	tf.flags = tf.get_u16()
	tf.units_per_em = tf.get_u16()
	tf.created = tf.get_date()
	tf.modified = tf.get_date()
	tf.x_min = tf.get_i16()
	tf.y_min = tf.get_i16()
	tf.x_max = tf.get_i16()
	tf.y_max = tf.get_i16()
	tf.mac_style = tf.get_u16()
	tf.lowest_rec_ppem = tf.get_u16()
	tf.font_direction_hint = tf.get_i16()
	tf.index_to_loc_format = tf.get_i16()
	tf.glyph_data_format = tf.get_i16()
}
/******************************************************************************
*
* Name_Table
*
******************************************************************************/
// read_name_table extracts the family/sub-family/full/postscript names
// from the 'name' table.
fn (mut tf TTF_File) read_name_table() {
	dprintln('*** READ NAME TABLE ***')
	assert 'name' in tf.tables
	table_offset := tf.tables['name'].offset
	tf.pos = tf.tables['name'].offset

	format := tf.get_u16() // must be 0
	assert format == 0
	count := tf.get_u16()
	string_offset := tf.get_u16()

	for _ in 0 .. count {
		platform_id := tf.get_u16()
		// platform_specific_id :=
		tf.get_u16()
		// language_id          :=
		tf.get_u16()
		name_id := tf.get_u16()
		length := tf.get_u16()
		offset := tf.get_u16()

		old_pos := tf.pos
		tf.pos = table_offset + string_offset + offset

		// Unicode (0) and Microsoft (3) platforms store UTF-16BE names
		mut name := ''
		if platform_id == 0 || platform_id == 3 {
			name = tf.get_unicode_string(length)
		} else {
			name = tf.get_string(length)
		}
		// dprintln("Name [${platform_id}] id:[$name_id] [$name]")
		tf.pos = old_pos

		match name_id {
			1 { tf.font_family = name }
			2 { tf.font_sub_family = name }
			4 { tf.full_name = name }
			6 { tf.postscript_name = name }
			else {}
		}
	}
}

/******************************************************************************
*
* Cmap_Table
*
******************************************************************************/
// read_cmap_table scans the 'cmap' subtable directory and decodes the
// Microsoft Unicode subtables (platform 3, encoding 0/1).
fn (mut tf TTF_File) read_cmap_table() {
	dprintln('*** READ CMAP TABLE ***')
	assert 'cmap' in tf.tables
	table_offset := tf.tables['cmap'].offset
	tf.pos = table_offset

	version := tf.get_u16() // must be 0
	assert version == 0
	number_sub_tables := tf.get_u16()

	// tables must be sorted by platform id and then platform specific
	// encoding.
	for _ in 0 .. number_sub_tables {
		// platforms are:
		// 0 - Unicode -- use specific id 6 for full coverage. 0/4 common.
		// 1 - Macintosh (Discouraged)
		// 2 - reserved
		// 3 - Microsoft
		platform_id := tf.get_u16()
		platform_specific_id := tf.get_u16()
		offset := tf.get_u32()
		dprintln('CMap platform_id=$platform_id specific_id=$platform_specific_id offset=$offset')
		if platform_id == 3 && platform_specific_id <= 1 {
			tf.read_cmap(table_offset + offset)
		}
	}
}

// read_cmap decodes one cmap subtable at the given absolute offset.
// Only formats 0 and 4 are supported.
fn (mut tf TTF_File) read_cmap(offset u32) {
	old_pos := tf.pos
	tf.pos = offset
	format := tf.get_u16()
	length := tf.get_u16()
	language := tf.get_u16()

	dprintln('  Cmap format: $format length: $length language: $language')
	if format == 0 {
		dprintln('  Cmap 0 Init...')
		mut cmap := TrueTypeCmap{}
		cmap.init_0(mut tf)
		tf.cmaps << cmap
	} else if format == 4 {
		dprintln('  Cmap 4 Init...')
		mut cmap := TrueTypeCmap{}
		cmap.init_4(mut tf)
		tf.cmaps << cmap
	}

	tf.pos = old_pos
}

/******************************************************************************
*
* CMAPS 0/4
*
******************************************************************************/
// map_code maps a character code to a glyph index by querying every
// decoded cmap subtable (later subtables win).
fn (mut tf TTF_File) map_code(char_code int) u16 {
	mut index := 0
	for i in 0 .. tf.cmaps.len {
		mut cmap := tf.cmaps[i]
		if cmap.format == 0 {
			// dprintln("format 0")
			index = cmap.map_0(char_code)
		} else if cmap.format == 4 {
			// dprintln("format 4")
			index = cmap.map_4(char_code, mut tf)
		}
	}
	return u16(index)
}

// init_0 reads a format-0 (byte-indexed) cmap subtable.
fn (mut tm TrueTypeCmap) init_0(mut tf TTF_File) {
	tm.format = 0
	for i in 0 .. 256 {
		glyph_index := tf.get_u8()
		// BUGFIX: the old string used `%glyph_index`, which V does not
		// interpolate — it printed the literal text instead of the value.
		dprintln('   Glyph[$i] = $glyph_index')
		tm.arr << glyph_index
	}
}

// map_0 maps a char code through a format-0 table (codes 0..255 only).
fn (mut tm TrueTypeCmap) map_0(char_code int) int {
	if char_code >= 0 && char_code <= 255 {
		// dprintln("charCode $char_code maps to ${tm.arr[char_code]}")
		return tm.arr[char_code]
	}
	return 0
}

// init_4 reads a format-4 (segmented) cmap subtable.
fn (mut tm TrueTypeCmap) init_4(mut tf TTF_File) {
	tm.format = 4

	// 2x segcount
	seg_count := tf.get_u16() >> 1
	// search_range   :=
	tf.get_u16()
	// entry_selector :=
	tf.get_u16()
	// range_shift    :=
	tf.get_u16()

	// Ending character code for each segment, last is 0xffff
	for _ in 0 .. seg_count {
		tm.segments << Segment{0, 0, tf.get_u16(), 0}
	}

	// reservePAD
	tf.get_u16()

	// starting character code for each segment
	for i in 0 .. seg_count {
		tm.segments[i].start_code = tf.get_u16()
	}

	// Delta for all character codes in segment
	for i in 0 .. seg_count {
		tm.segments[i].id_delta = tf.get_u16()
	}

	// offset in bytes to glyph indexArray, or 0
	// (converted here to an absolute file position)
	for i in 0 .. seg_count {
		ro := u32(tf.get_u16())
		if ro != 0 {
			tm.segments[i].id_range_offset = tf.pos - 2 + ro
		} else {
			tm.segments[i].id_range_offset = 0
		}
	}
}

// map_4 maps a char code through a format-4 table, caching the result.
fn (mut tm TrueTypeCmap) map_4(char_code int, mut tf TTF_File) int {
	// the cache only covers the BMP (2^16 codes); anything outside maps to
	// the missing glyph instead of panicking on an out-of-range index
	if char_code < 0 || char_code >= tm.cache.len {
		return 0
	}
	old_pos := tf.pos
	if tm.cache[char_code] == -1 {
		// dprintln("Not found, search for it!")
		mut found := false
		for segment in tm.segments {
			if segment.start_code <= char_code && segment.end_code >= char_code {
				mut index := (segment.id_delta + char_code) & 0xffff
				if segment.id_range_offset > 0 {
					glyph_index_address := segment.id_range_offset +
						2 * u32(char_code - segment.start_code)
					tf.pos = glyph_index_address
					index = tf.get_u16()
				}

				tm.cache[char_code] = index
				found = true
				break
			}
		}
		if !found {
			tm.cache[char_code] = 0
		}
	}
	tf.pos = old_pos
	return tm.cache[char_code]
}

/******************************************************************************
*
* Hhea table
*
******************************************************************************/
// read_hhea_table parses the 'hhea' table (horizontal layout metrics).
fn (mut tf TTF_File) read_hhea_table() {
	dprintln('*** READ HHEA TABLE ***')
	assert 'hhea' in tf.tables
	table_offset := tf.tables['hhea'].offset
	tf.pos = table_offset

	// version :=
	tf.get_fixed() // 0x00010000

	tf.ascent = tf.get_fword()
	tf.descent = tf.get_fword()
	tf.line_gap = tf.get_fword()
	tf.advance_width_max = tf.get_ufword()
	tf.min_left_side_bearing = tf.get_fword()
	tf.min_right_side_bearing = tf.get_fword()
	tf.x_max_extent = tf.get_fword()
	tf.caret_slope_rise = tf.get_i16()
	tf.caret_slope_run = tf.get_i16()
	tf.caret_offset = tf.get_fword()
	tf.get_i16() // reserved
	tf.get_i16() // reserved
	tf.get_i16() // reserved
	tf.get_i16() // reserved
	tf.metric_data_format = tf.get_i16()
	tf.num_of_long_hor_metrics = tf.get_u16()
}

/******************************************************************************
*
* Kern table
*
******************************************************************************/
// Kern0Table is a decoded format-0 kerning subtable: a map from
// (left_glyph << 16 | right_glyph) to the kerning value.
struct Kern0Table {
mut:
	swap      bool // apply the value on y instead of x
	offset    u32
	n_pairs   int
	kmap      map[u32]i16
	old_index int = -1 // previous glyph, for pair lookups
}

// reset forgets the previous glyph so the next lookup starts a new pair.
fn (mut kt Kern0Table) reset() {
	kt.old_index = -1
}

// get returns the (x, y) kerning adjustment between the previously seen
// glyph and glyph_index, and remembers glyph_index for the next call.
fn (mut kt Kern0Table) get(glyph_index int) (int, int) {
	mut x := 0

	if kt.old_index >= 0 {
		ch := ((u32(kt.old_index & 0xFFFF) << 16) | u32(glyph_index & 0xFFFF))
		// dprintln("kern_get: $ch")
		if ch in kt.kmap {
			x = int(kt.kmap[ch])
		}
	}
	kt.old_index = glyph_index
	if kt.swap {
		return 0, x
	}
	return x, 0
}

// create_kern_table0 reads a format-0 kern subtable at the current cursor.
fn (mut tf TTF_File) create_kern_table0(vertical bool, cross bool) Kern0Table {
	offset := tf.pos
	n_pairs := tf.get_u16()
	search_range := tf.get_u16()
	entry_selector := tf.get_u16()
	range_shift := tf.get_u16()
	dprintln('n_pairs: $n_pairs search_range: $search_range entry_selector: $entry_selector range_shift: $range_shift')

	mut kt0 := Kern0Table{
		swap: (vertical && !cross) || (!vertical && cross)
		offset: offset
		n_pairs: n_pairs
	}

	for _ in 0 .. n_pairs {
		left := tf.get_u16()
		right := tf.get_u16()
		value := tf.get_fword()
		tmp_index := (u32(left) << 16) | u32(right)
		kt0.kmap[tmp_index] = value
		// dprintln("index: ${tmp_index.hex()} val: ${value.hex()}")
	}
	kt0.old_index = -1
	return kt0
}

// read_kern_table parses the optional 'kern' table; unsupported subtable
// formats are skipped.
fn (mut tf TTF_File) read_kern_table() {
	dprintln('*** READ KERN TABLE ***')
	if 'kern' !in tf.tables {
		return
	}
	table_offset := tf.tables['kern'].offset
	tf.pos = table_offset

	version := tf.get_u16() // must be 0
	assert version == 0 // must be 0
	n_tables := tf.get_u16()

	dprintln('Kern Table version: $version Kern nTables: $n_tables')

	for _ in 0 .. n_tables {
		st_version := tf.get_u16() // sub table version
		length := tf.get_u16()
		coverage := tf.get_u16()
		format := coverage >> 8
		cross := coverage & 4
		vertical := (coverage & 0x1) == 0
		dprintln('Kerning subtable version [$st_version] format [$format] length [$length] coverage: [$coverage.hex()]')
		if format == 0 {
			dprintln('kern format: 0')
			kern := tf.create_kern_table0(vertical, cross != 0)
			tf.kern << kern
		} else {
			dprintln('Unknown format -- skip')
			tf.pos = tf.pos + length
		}
	}
}

// reset_kern resets the pair state of every kern subtable.
fn (mut tf TTF_File) reset_kern() {
	for i in 0 .. tf.kern.len {
		tf.kern[i].reset()
	}
}

// next_kern accumulates the (x, y) kerning adjustment for glyph_index
// over all kern subtables.
fn (mut tf TTF_File) next_kern(glyph_index int) (int, int) {
	mut x := 0
	mut y := 0
	for i in 0 .. tf.kern.len {
		tmp_x, tmp_y := tf.kern[i].get(glyph_index)
		x = x + tmp_x
		y = y + tmp_y
	}
	return x, y
}

/******************************************************************************
*
* TTF_File Utility
*
******************************************************************************/
// get_info_string returns a human-readable summary of the font header.
pub fn (tf TTF_File) get_info_string() string {
	txt := '----- Font Info -----
font_family     : $tf.font_family
font_sub_family : $tf.font_sub_family
full_name       : $tf.full_name
postscript_name : $tf.postscript_name
version         : $tf.version
font_revision   : $tf.font_revision
magic_number    : $tf.magic_number.hex()
flags           : $tf.flags.hex()
created  unixTS : $tf.created
modified unixTS : $tf.modified
box             : [x_min:$tf.x_min, y_min:$tf.y_min, x_Max:$tf.x_max, y_Max:$tf.y_max]
mac_style       : $tf.mac_style
-----------------------
'
	return txt
}

/******************************************************************************
*
* TTF_File test
*
******************************************************************************/
// tst sanity-checks the big-endian getters on a known byte pattern.
fn tst() {
	mut tf := TTF_File{}

	tf.buf = [
		byte(0xFF), /* 8  bit */
		0xF1,
		0xF2, /* 16 bit */
		0x81,
		0x23,
		0x45,
		0x67, /* 32 bit */
		0x12,
		0x34,
		0x12,
		0x34, /* get_2dot14 16 bit */
		0x12,
		0x34,
		0x12,
		0x34 /* get_fixed 32 bit int */,
	]
	assert tf.get_u8().hex() == 'ff'
	assert tf.get_u16().hex() == 'f1f2'
	assert tf.get_u32().hex() == '81234567'

	dprintln('buf len: $tf.buf.len')
	// dprintln( tf.get_2dot14() )
	// dprintln( tf.get_fixed() )
}

// ----- file: ttf_test.v -----
import x.ttf
import os
import strings

/**********************************************************************
* BMP render module test file
* Copyright (c) 2021 Dario Deledda. All rights reserved.
* Use of this source code is governed by an MIT license
* that can be found in the LICENSE file.
*
* Note:
* use `v -d create_data vlib/x/ttf/ttf_test.v` to generate binary data
**********************************************************************/
const font_path = 'Qarmic_sans_Abridged.ttf'

const font_bytes = $embed_file('ttf_test_data.bin')
0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +00bf bfbf bfbf bfbf bfbf bfbf bf00 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +00bf bfbf bfbf bfbf bfbf bfbf bf00 0000 +bfff ffff ffff ffff ffff ffff ffbf 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +bfff ffff ffff ffff ffff ffff ffbf 0000 +00bf ffff ffbf ffff bfff ffff bf00 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +00bf ffff ffbf ffff bfff ffff bf00 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 00bf +ffbf 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 00bf +ffbf 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 bfbf +ffbf bfbf bf00 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0032 72bf +bfbf 0000 0000 bfbf bfbf 5400 00bf ffff +ffff ffff ffbf 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0000 0032 72bf +0000 0000 00bf ffff bf00 0065 9999 ffff +ffff bf00 00bf ffff ffff ff7f 0000 bfff +bfff bfff bf00 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 0065 9999 6500 +0000 0000 00bf ffff bf00 bfff ffff ffbf +ffff 
ffbf bfff bfff bfbf ffff bf00 bfff +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 bf72 3300 7fbf +0000 0000 00bf ffff bf7f 5fff ffbf 3f7f +8fbf ffbf ffbf 5500 0000 5fbf 0000 bfff +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf7f 5fff ffbf 3f7f +0000 0000 00bf ffff bfbf ffbf bfbf ffff +ffff ffbf ffff ff7f 0000 0000 0000 bfff +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfbf 00bf bfbf 8f5f +0000 0000 00bf ffff 7f5f ffff ffff ffff +ffff ffbf 5fbf ffff bfbf bfbf 0000 bfff +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff 7f5f 0000 0000 0000 +0000 0000 00bf ffff bfff bfff ffbf ffff +ffff ffbf 0000 5fbf ffff ffff bf00 bfff +bf00 0000 0000 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfff bfff ffbf ffff +0000 0000 00bf ffff bfff bf00 0000 0000 +0000 0000 0000 0000 7f7f ffff bf00 bfff +bf00 0000 bf00 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfff bf00 0000 0000 +0000 0000 00bf ffff bfff bf00 0000 0000 +0000 bf00 bf00 0000 0055 bfff ffbf bfff +ff7f 00bf ff5f 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfff bf00 0000 0000 +0000 0000 00bf ffff bfbf ffbf 0000 0055 +7fbf ffbf ffbf 7f55 00bf ffff bf00 7f5f +ff7f 7f5f ffbf 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfbf ffbf 0000 0055 +0000 0000 00bf ffff bfbf ffff bfbf bfff +ffff bfbf ffff ffff ffff ffff bf00 00bf +ffff ffff ffbf 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bfbf 0000 bfbf bf7f +0000 0000 00bf ffff bf00 bfff ffff ffff +ffbf 0000 bfbf ffff ffff bfbf 0000 00bf +ffbf ffff bf00 0000 0000 0000 0000 0000 +0000 0000 00bf ffff bf00 bf00 0000 3f7f +0000 0000 0000 5fbf 0000 00bf ffbf 8f5f +3f00 0000 0000 5fbf bf5f 0000 0000 0000 +0000 bf5f 0000 0000 0000 0000 0000 0000 +0000 0000 0000 5fbf 0000 00bf ffbf 8f5f +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 
0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +0000 0000 0000 0000 0000 0000 0000 0000 +' + +fn save_raw_data_as_array(buf_bin []byte, file_name string) { +	mut buf := strings.new_builder(buf_bin.len * 5) +	for x in buf_bin { +		buf.write_string('0x${x:02x},') +	} +	os.write_file_array(file_name, buf) or { panic(err) } +} + +fn test_main() { +	mut tf := ttf.TTF_File{} +	$if create_data ? { +		tf.buf = os.read_bytes(font_path) or { panic(err) } +		println('TrueTypeFont file [$font_path] len: $tf.buf.len') +		save_raw_data_as_array(tf.buf, 'test_ttf_Font_arr.bin') +	} $else { +		mut mut_font_bytes := font_bytes +		tf.buf = unsafe { mut_font_bytes.data().vbytes(font_bytes.len) } +	} +	tf.init() +	// println("Unit per EM: $tf.units_per_em") + +	w := 64 +	h := 32 +	bp := 4 +	sz := w * h * bp + +	font_size := 20 +	device_dpi := 72 +	scale := f32(font_size * device_dpi) / f32(72 * tf.units_per_em) + +	mut bmp := ttf.BitMap{ +		tf: &tf +		buf: unsafe { malloc(sz) } +		buf_size: sz +		scale: scale +		width: w +		height: h +	} + +	y_base := int((tf.y_max - tf.y_min) * bmp.scale) +	bmp.clear() +	bmp.set_pos(0, y_base) +	bmp.init_filler() +	bmp.draw_text('Test Text') + +	mut test_buf := get_raw_data(test_data) +	$if create_data ? { +		bmp.save_as_ppm('test_ttf.ppm') +		bmp.save_raw_data('test_ttf.bin') +		test_buf = os.read_bytes('test_ttf.bin') or { panic(err) } +	} + +	ram_buf := bmp.get_raw_bytes() +	assert ram_buf.len == test_buf.len +	for i in 0 .. 
ram_buf.len { +		if test_buf[i] != ram_buf[i] { +			assert false +		} +	} +} + +fn get_raw_data(data string) []byte { +	mut buf := []byte{} +	mut c := 0 +	mut b := 0 +	for ch in data { +		if ch >= `0` && ch <= `9` { +			b = b << 4 +			b += int(ch - `0`) +			c++ +		} else if ch >= `a` && ch <= `f` { +			b = b << 4 +			b += int(ch - `a` + 10) +			c++ +		} + +		if c == 2 { +			buf << byte(b) +			b = 0 +			c = 0 +		} +	} +	return buf +} diff --git a/v_windows/v/old/vlib/x/ttf/ttf_test_data.bin b/v_windows/v/old/vlib/x/ttf/ttf_test_data.bin Binary files differnew file mode 100644 index 0000000..6d6408c --- /dev/null +++ b/v_windows/v/old/vlib/x/ttf/ttf_test_data.bin  | 
