ASCII To Decimal

This algorithm converts each character of an ASCII string into its decimal character code, zero-padded to three digits, and concatenates the codes into a single string.



/*****Please include following header files*****/
// string.h
// stdlib.h
// stdio.h
/***********************************************/

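/* Concatenates str1 and str2 into a newly allocated, null-terminated string. */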
char* AppendString(const char* str1, const char* str2) {
	int str1Len = strlen(str1);
	int str2Len = strlen(str2);
	int strLen = str1Len + str2Len + 1;
	char* str = malloc(strLen);

	for (int i = 0; i < str1Len; i++)
		str[i] = str1[i];

	for (int i = 0; i < str2Len; i++)
		str[(str1Len + i)] = str2[i];

	str[strLen - 1] = '\0';

	return str;
}

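/* Returns a newly allocated copy of count characters of str starting at index,
   or an empty string if the requested range lies outside str. */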
char* GetSubString(char* str, int index, int count) {
	int strLen = strlen(str);
	int lastIndex = index + count;

	if (index < 0 || lastIndex > strLen) return "";

	char* subStr = malloc(count + 1);

	for (int i = 0; i < count; i++) {
		subStr[i] = str[index + i];
	}

	subStr[count] = '\0';

	return subStr;
}

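/* Builds a new string equal to str with subStr inserted at position index. */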
char* InsertString(char* str, int index, char* subStr) {
	char* s = GetSubString(str, 0, index);
	s = AppendString(s, subStr);
	s = AppendString(s, GetSubString(str, index, strlen(str) - index));

	return s;
}

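/* Converts each character of str to its decimal character code, zero-padded
   to three digits, and concatenates the codes into a single string.
   Assumes 7-bit ASCII input, so every code fits in three digits. */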
char* ASCIIToDecimal(char* str) {
	char* dec = "";
	int strLen = strlen(str);

	for (int i = 0; i < strLen; ++i)
	{
		char* cDec = malloc(11);	/* large enough for any character code */
		snprintf(cDec, 11, "%d", str[i]);	/* portable replacement for the non-standard _itoa */
		int cDecLen = strlen(cDec);

		if (cDecLen < 3)
			for (size_t j = 0; j < (3 - cDecLen); j++)
				cDec = InsertString(cDec, 0, "0");

		dec = AppendString(dec, cDec);
	}

	return dec;
}
								


Example

									char* data = "Programming Algorithms";
char* value = ASCIIToDecimal(data);
								


Output

080114111103114097109109105110103032065108103111114105116104109115
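

For reference, a minimal driver for the listing above might look like the following sketch. The main function, the printf call, and the final free are assumptions added here for illustration; they are not part of the original listing. Note that the helper functions intentionally leak their intermediate allocations to keep the example short, which is acceptable for a demo but not for production code.

#include <string.h>
#include <stdlib.h>
#include <stdio.h>

/* ... paste AppendString, GetSubString, InsertString and ASCIIToDecimal here ... */

int main(void) {
	char* data = "Programming Algorithms";
	char* value = ASCIIToDecimal(data);

	printf("%s\n", value);	/* prints the decimal codes shown under Output */
	free(value);		/* safe here: the input is non-empty, so value was malloc'd */

	return 0;
}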