Decimal Implicit Conversion (Char to Decimal)
[ This article is for Windows Phone 8 developers. If you’re developing for Windows 10, see the latest documentation. ]
Converts a Unicode character to a Decimal.
Assembly: mscorlib (in mscorlib.dll)
Parameters
- value
- Type: System.Char
A Unicode character.
The following code example converts Char values (Unicode characters) to Decimal numbers using the Char to Decimal conversion. The conversion is applied automatically in C#, which supports implicit conversion operators; in Visual Basic and C++, which do not invoke this operator implicitly, you must call the op_Implicit operator directly or use another conversion method (such as Convert.ToDecimal) to convert to Decimal.
// Example of the implicit conversion from char to decimal.
using System;

class Example
{
    // Column layout shared by the header row, the separator row, and every data row.
    const string formatter = "{0,6}{1,15}{2,10:X8}{3,9:X8}{4,9:X8}{5,9:X8}";

    // Convert the char argument and display the decimal value.
    public static void DecimalFromChar(System.Windows.Controls.TextBlock outputBlock, char argument)
    {
        // The plain assignment below is what exercises the implicit
        // char-to-decimal conversion this example demonstrates.
        decimal converted = argument;
        int[] parts = decimal.GetBits(converted);

        // Show the character, its decimal value, and the four 32-bit words
        // of the decimal's internal binary representation (high word first).
        outputBlock.Text += String.Format(formatter, argument, converted,
            parts[3], parts[2], parts[1], parts[0]) + "\n";
    }

    public static void Demo(System.Windows.Controls.TextBlock outputBlock)
    {
        // Introductory text, then the table header and separator.
        outputBlock.Text += String.Format(
            "This example of the implicit conversion from char to " +
            "decimal generates the \nfollowing output. It displays " +
            "the decimal value and its binary representation.\n") + "\n";
        outputBlock.Text += String.Format(formatter, "char", "decimal value",
            "bits[3]", "bits[2]", "bits[1]", "bits[0]") + "\n";
        outputBlock.Text += String.Format(formatter, "----", "-------------",
            "-------", "-------", "-------", "-------") + "\n";

        // Convert char values and display the results.
        foreach (char sample in new[] { '\0', ' ', '*', 'A', 'a', '{', 'Æ' })
        {
            DecimalFromChar(outputBlock, sample);
        }
    }
}

/*
This example of the implicit conversion from char to decimal generates the
following output. It displays the decimal value and its binary representation.

  char  decimal value   bits[3]  bits[2]  bits[1]  bits[0]
  ----  -------------   -------  -------  -------  -------
                    0  00000000 00000000 00000000 00000000
                   32  00000000 00000000 00000000 00000020
     *             42  00000000 00000000 00000000 0000002A
     A             65  00000000 00000000 00000000 00000041
     a             97  00000000 00000000 00000000 00000061
     {            123  00000000 00000000 00000000 0000007B
     Æ            198  00000000 00000000 00000000 000000C6
*/
Show: